astrocode-workflow 0.1.9 → 0.1.12

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -215,6 +215,15 @@ export function createAstroAgents(opts) {
    prompt: BASE_STAGE_PROMPT,
    permission: stageReadOnlyPermissions(),
  }, "utility");
+ // Fallback general agent for delegation failures
+ agents["General"] = mk("General", {
+   description: "General-purpose fallback agent for delegation.",
+   mode: "subagent",
+   hidden: true,
+   temperature: 0.1,
+   prompt: BASE_STAGE_PROMPT,
+   permission: stageReadOnlyPermissions(),
+ }, "utility");
  // Allow user config to disable certain agents
  for (const disabled of pluginConfig.disabled_agents) {
    delete agents[disabled];
@@ -43,7 +43,8 @@ export function assertInsideAstro(repoRoot, filePath) {
  const abs = path.resolve(filePath);
  const astroRoot = path.resolve(path.join(repoRoot, ".astro"));
  if (!abs.startsWith(astroRoot + path.sep) && abs !== astroRoot) {
-   throw new Error(`Refusing to write outside .astro: ${filePath}`);
+   const relPath = path.relative(repoRoot, filePath);
+   throw new Error(`Refusing to write outside .astro: ${filePath} (relative: ${relPath}, astroRoot: ${astroRoot})`);
  }
  if (!abs.startsWith(absRepo + path.sep) && abs !== absRepo) {
    throw new Error(`Refusing to write outside repo root: ${filePath}`);
package/dist/state/db.js CHANGED
@@ -2,6 +2,7 @@ import fs from "node:fs";
  import path from "node:path";
  import { SCHEMA_SQL, SCHEMA_VERSION } from "./schema";
  import { nowISO } from "../shared/time";
+ import { info } from "../shared/log";
  import { createDatabaseAdapter } from "./adapters";
  /** Ensure directory exists for a file path. */
  function ensureParentDir(filePath) {
@@ -55,6 +56,19 @@ export function ensureSchema(db, opts) {
  }
  try {
    db.exec(SCHEMA_SQL);
+   // Migrations for existing databases
+   // Add created_at to stage_runs if missing (introduced in schema version 2)
+   try {
+     const columns = db.prepare("PRAGMA table_info(stage_runs)").all();
+     const hasCreatedAt = columns.some(col => col.name === 'created_at');
+     if (!hasCreatedAt) {
+       db.exec("ALTER TABLE stage_runs ADD COLUMN created_at TEXT NOT NULL DEFAULT ''");
+       info("[Astrocode] Added created_at column to stage_runs table");
+     }
+   }
+   catch (e) {
+     // Column might already exist or table doesn't exist, ignore
+   }
    const row = db.prepare("SELECT schema_version FROM repo_state WHERE id = 1").get();
    if (!row) {
      const now = nowISO();
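Note: the additive migration above follows the usual SQLite pattern of probing the live table with PRAGMA table_info before issuing ALTER TABLE, so re-running it is a no-op. A minimal standalone sketch of that pattern, assuming a better-sqlite3-style driver (the driver, database path, and helper name below are assumptions for illustration, not part of this package):

    import Database from "better-sqlite3"; // assumed driver; astrocode-workflow uses its own adapter layer

    const db = new Database(".astro/state.db"); // hypothetical path

    // Add a column only if it is not already present, keeping the migration idempotent.
    function addColumnIfMissing(table: string, column: string, ddl: string): void {
      const columns = db.prepare(`PRAGMA table_info(${table})`).all() as { name: string }[];
      if (!columns.some((col) => col.name === column)) {
        db.exec(`ALTER TABLE ${table} ADD COLUMN ${ddl}`);
      }
    }

    addColumnIfMissing("stage_runs", "created_at", "created_at TEXT NOT NULL DEFAULT ''");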
@@ -1,2 +1,2 @@
  export declare const SCHEMA_VERSION = 2;
- export declare const SCHEMA_SQL = "\nPRAGMA foreign_keys = ON;\n\nCREATE TABLE IF NOT EXISTS repo_state (\n id INTEGER PRIMARY KEY CHECK (id = 1),\n schema_version INTEGER NOT NULL,\n created_at TEXT NOT NULL,\n updated_at TEXT NOT NULL,\n spec_hash_before TEXT,\n spec_hash_after TEXT,\n last_run_id TEXT,\n last_story_key TEXT,\n last_event_at TEXT\n);\n\nCREATE TABLE IF NOT EXISTS settings (\n key TEXT PRIMARY KEY,\n value TEXT NOT NULL,\n updated_at TEXT NOT NULL\n);\n\nCREATE TABLE IF NOT EXISTS epics (\n epic_key TEXT PRIMARY KEY,\n title TEXT NOT NULL,\n body_md TEXT NOT NULL DEFAULT '',\n state TEXT NOT NULL DEFAULT 'active',\n priority INTEGER NOT NULL DEFAULT 0,\n created_at TEXT NOT NULL,\n updated_at TEXT NOT NULL\n);\n\nCREATE TABLE IF NOT EXISTS story_drafts (\n draft_id TEXT PRIMARY KEY,\n title TEXT NOT NULL,\n body_md TEXT NOT NULL DEFAULT '',\n meta_json TEXT NOT NULL DEFAULT '{}',\n created_at TEXT NOT NULL,\n updated_at TEXT NOT NULL\n);\n\nCREATE TABLE IF NOT EXISTS story_keyseq (\n id INTEGER PRIMARY KEY CHECK (id = 1),\n next_story_num INTEGER NOT NULL\n);\n\nCREATE TABLE IF NOT EXISTS stories (\n story_key TEXT PRIMARY KEY,\n epic_key TEXT,\n title TEXT NOT NULL,\n body_md TEXT NOT NULL DEFAULT '',\n state TEXT NOT NULL DEFAULT 'queued', -- queued|approved|in_progress|done|blocked|archived\n priority INTEGER NOT NULL DEFAULT 0,\n approved_at TEXT,\n locked_by_run_id TEXT,\n locked_at TEXT,\n in_progress INTEGER NOT NULL DEFAULT 0,\n created_at TEXT NOT NULL,\n updated_at TEXT NOT NULL,\n FOREIGN KEY (epic_key) REFERENCES epics(epic_key)\n);\n\nCREATE TABLE IF NOT EXISTS runs (\n run_id TEXT PRIMARY KEY,\n story_key TEXT NOT NULL,\n status TEXT NOT NULL DEFAULT 'created', -- created|running|completed|failed|aborted\n pipeline_stages_json TEXT NOT NULL DEFAULT '[]',\n current_stage_key TEXT,\n created_at TEXT NOT NULL,\n started_at TEXT,\n completed_at TEXT,\n updated_at TEXT NOT NULL,\n error_text TEXT,\n FOREIGN KEY (story_key) REFERENCES stories(story_key)\n);\n\nCREATE TABLE IF NOT EXISTS stage_runs (\n stage_run_id TEXT PRIMARY KEY,\n run_id TEXT NOT NULL,\n stage_key TEXT NOT NULL,\n stage_index INTEGER NOT NULL,\n status TEXT NOT NULL DEFAULT 'pending', -- pending|running|completed|failed|skipped\n subagent_type TEXT,\n subagent_session_id TEXT,\n started_at TEXT,\n completed_at TEXT,\n updated_at TEXT NOT NULL,\n baton_path TEXT,\n summary_md TEXT,\n output_json TEXT,\n error_text TEXT,\n FOREIGN KEY (run_id) REFERENCES runs(run_id)\n);\n\nCREATE TABLE IF NOT EXISTS artifacts (\n artifact_id TEXT PRIMARY KEY,\n run_id TEXT,\n stage_key TEXT,\n type TEXT NOT NULL, -- plan|baton|evidence|diff|log|summary|commit|tool_output|snapshot\n path TEXT NOT NULL,\n sha256 TEXT,\n meta_json TEXT NOT NULL DEFAULT '{}',\n created_at TEXT NOT NULL,\n FOREIGN KEY (run_id) REFERENCES runs(run_id)\n);\n\nCREATE TABLE IF NOT EXISTS tool_runs (\n tool_run_id TEXT PRIMARY KEY,\n run_id TEXT,\n stage_key TEXT,\n tool_name TEXT NOT NULL,\n args_json TEXT NOT NULL DEFAULT '{}',\n output_summary TEXT NOT NULL DEFAULT '',\n output_artifact_id TEXT,\n created_at TEXT NOT NULL,\n FOREIGN KEY (run_id) REFERENCES runs(run_id)\n);\n\nCREATE TABLE IF NOT EXISTS events (\n event_id TEXT PRIMARY KEY,\n run_id TEXT,\n stage_key TEXT,\n type TEXT NOT NULL,\n body_json TEXT NOT NULL DEFAULT '{}',\n created_at TEXT NOT NULL,\n FOREIGN KEY (run_id) REFERENCES runs(run_id)\n);\n\nCREATE TABLE IF NOT EXISTS injects (\n inject_id TEXT PRIMARY KEY,\n type TEXT NOT NULL DEFAULT 'note',\n title TEXT NOT 
NULL,\n body_md TEXT NOT NULL,\n tags_json TEXT NOT NULL DEFAULT '[]',\n scope TEXT NOT NULL DEFAULT 'repo', -- repo|run:<id>|story:<key>|global\n source TEXT NOT NULL DEFAULT 'user', -- user|tool|agent|import\n priority INTEGER NOT NULL DEFAULT 50,\n expires_at TEXT,\n sha256 TEXT,\n created_at TEXT NOT NULL,\n updated_at TEXT NOT NULL\n);\n\nCREATE TABLE IF NOT EXISTS running_batches (\n batch_id TEXT PRIMARY KEY,\n run_id TEXT,\n session_id TEXT,\n status TEXT NOT NULL DEFAULT 'running', -- running|completed|failed|aborted\n created_at TEXT NOT NULL,\n updated_at TEXT NOT NULL,\n FOREIGN KEY (run_id) REFERENCES runs(run_id)\n);\n\nCREATE TABLE IF NOT EXISTS workflow_metrics (\n metric_id TEXT PRIMARY KEY,\n run_id TEXT,\n stage_key TEXT,\n name TEXT NOT NULL,\n value_num REAL,\n value_text TEXT,\n created_at TEXT NOT NULL,\n FOREIGN KEY (run_id) REFERENCES runs(run_id)\n);\n\nCREATE TABLE IF NOT EXISTS template_intents (\n intent_key TEXT PRIMARY KEY,\n body_md TEXT NOT NULL,\n updated_at TEXT NOT NULL\n);\n\n-- vNext tables\n\nCREATE TABLE IF NOT EXISTS story_relations (\n parent_story_key TEXT NOT NULL,\n child_story_key TEXT NOT NULL,\n relation_type TEXT NOT NULL DEFAULT 'split',\n reason TEXT NOT NULL DEFAULT '',\n created_at TEXT NOT NULL,\n PRIMARY KEY (parent_story_key, child_story_key),\n FOREIGN KEY (parent_story_key) REFERENCES stories(story_key),\n FOREIGN KEY (child_story_key) REFERENCES stories(story_key)\n);\n\nCREATE TABLE IF NOT EXISTS continuations (\n continuation_id INTEGER PRIMARY KEY AUTOINCREMENT,\n session_id TEXT NOT NULL,\n run_id TEXT,\n directive_hash TEXT NOT NULL,\n kind TEXT NOT NULL, -- continue|stage|blocked|repair\n reason TEXT NOT NULL DEFAULT '',\n created_at TEXT NOT NULL,\n FOREIGN KEY (run_id) REFERENCES runs(run_id)\n);\n\nCREATE INDEX IF NOT EXISTS idx_continuations_session_created ON continuations(session_id, created_at DESC);\nCREATE INDEX IF NOT EXISTS idx_continuations_run_created ON continuations(run_id, created_at DESC);\n\nCREATE TABLE IF NOT EXISTS context_snapshots (\n snapshot_id TEXT PRIMARY KEY,\n run_id TEXT NOT NULL,\n stage_key TEXT NOT NULL,\n summary_md TEXT NOT NULL,\n created_at TEXT NOT NULL,\n FOREIGN KEY (run_id) REFERENCES runs(run_id)\n);\n\nCREATE INDEX IF NOT EXISTS idx_context_snapshots_run_created ON context_snapshots(run_id, created_at DESC);\n\nCREATE TABLE IF NOT EXISTS agent_sessions (\n session_id TEXT PRIMARY KEY,\n parent_session_id TEXT,\n agent_name TEXT NOT NULL,\n run_id TEXT,\n stage_key TEXT,\n status TEXT NOT NULL DEFAULT 'active',\n created_at TEXT NOT NULL,\n updated_at TEXT NOT NULL\n);\n\n-- Indexes\n\nCREATE INDEX IF NOT EXISTS idx_stories_state ON stories(state);\nCREATE INDEX IF NOT EXISTS idx_runs_story ON runs(story_key);\nCREATE INDEX IF NOT EXISTS idx_runs_status ON runs(status);\nCREATE INDEX IF NOT EXISTS idx_stage_runs_run ON stage_runs(run_id, stage_index);\nCREATE INDEX IF NOT EXISTS idx_artifacts_run_stage ON artifacts(run_id, stage_key, created_at DESC);\nCREATE INDEX IF NOT EXISTS idx_events_run ON events(run_id, created_at DESC);\nCREATE INDEX IF NOT EXISTS idx_tool_runs_run ON tool_runs(run_id, created_at DESC);\nCREATE INDEX IF NOT EXISTS idx_injects_scope_priority ON injects(scope, priority DESC, created_at DESC);\n\n-- Stronger invariants (SQLite partial indexes)\n-- Only one run may be 'running' at a time (single-repo harness by default).\nCREATE UNIQUE INDEX IF NOT EXISTS uniq_single_running_run\n ON runs(status)\n WHERE status = 'running';\n\n-- Only one story may be 
in_progress=1 at a time (pairs with single running run).\nCREATE UNIQUE INDEX IF NOT EXISTS uniq_single_in_progress_story\n ON stories(in_progress)\n WHERE in_progress = 1;\n\n";
+ export declare const SCHEMA_SQL = "\nPRAGMA foreign_keys = ON;\n\nCREATE TABLE IF NOT EXISTS repo_state (\n id INTEGER PRIMARY KEY CHECK (id = 1),\n schema_version INTEGER NOT NULL,\n created_at TEXT NOT NULL,\n updated_at TEXT NOT NULL,\n spec_hash_before TEXT,\n spec_hash_after TEXT,\n last_run_id TEXT,\n last_story_key TEXT,\n last_event_at TEXT\n);\n\nCREATE TABLE IF NOT EXISTS settings (\n key TEXT PRIMARY KEY,\n value TEXT NOT NULL,\n updated_at TEXT NOT NULL\n);\n\nCREATE TABLE IF NOT EXISTS epics (\n epic_key TEXT PRIMARY KEY,\n title TEXT NOT NULL,\n body_md TEXT NOT NULL DEFAULT '',\n state TEXT NOT NULL DEFAULT 'active',\n priority INTEGER NOT NULL DEFAULT 0,\n created_at TEXT NOT NULL,\n updated_at TEXT NOT NULL\n);\n\nCREATE TABLE IF NOT EXISTS story_drafts (\n draft_id TEXT PRIMARY KEY,\n title TEXT NOT NULL,\n body_md TEXT NOT NULL DEFAULT '',\n meta_json TEXT NOT NULL DEFAULT '{}',\n created_at TEXT NOT NULL,\n updated_at TEXT NOT NULL\n);\n\nCREATE TABLE IF NOT EXISTS story_keyseq (\n id INTEGER PRIMARY KEY CHECK (id = 1),\n next_story_num INTEGER NOT NULL\n);\n\nCREATE TABLE IF NOT EXISTS stories (\n story_key TEXT PRIMARY KEY,\n epic_key TEXT,\n title TEXT NOT NULL,\n body_md TEXT NOT NULL DEFAULT '',\n state TEXT NOT NULL DEFAULT 'queued', -- queued|approved|in_progress|done|blocked|archived\n priority INTEGER NOT NULL DEFAULT 0,\n approved_at TEXT,\n locked_by_run_id TEXT,\n locked_at TEXT,\n in_progress INTEGER NOT NULL DEFAULT 0,\n created_at TEXT NOT NULL,\n updated_at TEXT NOT NULL,\n FOREIGN KEY (epic_key) REFERENCES epics(epic_key)\n);\n\nCREATE TABLE IF NOT EXISTS runs (\n run_id TEXT PRIMARY KEY,\n story_key TEXT NOT NULL,\n status TEXT NOT NULL DEFAULT 'created', -- created|running|completed|failed|aborted\n pipeline_stages_json TEXT NOT NULL DEFAULT '[]',\n current_stage_key TEXT,\n created_at TEXT NOT NULL,\n started_at TEXT,\n completed_at TEXT,\n updated_at TEXT NOT NULL,\n error_text TEXT,\n FOREIGN KEY (story_key) REFERENCES stories(story_key)\n);\n\nCREATE TABLE IF NOT EXISTS stage_runs (\n stage_run_id TEXT PRIMARY KEY,\n run_id TEXT NOT NULL,\n stage_key TEXT NOT NULL,\n stage_index INTEGER NOT NULL,\n status TEXT NOT NULL DEFAULT 'pending', -- pending|running|completed|failed|skipped\n created_at TEXT NOT NULL,\n subagent_type TEXT,\n subagent_session_id TEXT,\n started_at TEXT,\n completed_at TEXT,\n updated_at TEXT NOT NULL,\n baton_path TEXT,\n summary_md TEXT,\n output_json TEXT,\n error_text TEXT,\n FOREIGN KEY (run_id) REFERENCES runs(run_id)\n);\n\nCREATE TABLE IF NOT EXISTS artifacts (\n artifact_id TEXT PRIMARY KEY,\n run_id TEXT,\n stage_key TEXT,\n type TEXT NOT NULL, -- plan|baton|evidence|diff|log|summary|commit|tool_output|snapshot\n path TEXT NOT NULL,\n sha256 TEXT,\n meta_json TEXT NOT NULL DEFAULT '{}',\n created_at TEXT NOT NULL,\n FOREIGN KEY (run_id) REFERENCES runs(run_id)\n);\n\nCREATE TABLE IF NOT EXISTS tool_runs (\n tool_run_id TEXT PRIMARY KEY,\n run_id TEXT,\n stage_key TEXT,\n tool_name TEXT NOT NULL,\n args_json TEXT NOT NULL DEFAULT '{}',\n output_summary TEXT NOT NULL DEFAULT '',\n output_artifact_id TEXT,\n created_at TEXT NOT NULL,\n FOREIGN KEY (run_id) REFERENCES runs(run_id)\n);\n\nCREATE TABLE IF NOT EXISTS events (\n event_id TEXT PRIMARY KEY,\n run_id TEXT,\n stage_key TEXT,\n type TEXT NOT NULL,\n body_json TEXT NOT NULL DEFAULT '{}',\n created_at TEXT NOT NULL,\n FOREIGN KEY (run_id) REFERENCES runs(run_id)\n);\n\nCREATE TABLE IF NOT EXISTS injects (\n inject_id TEXT PRIMARY KEY,\n type TEXT NOT NULL 
DEFAULT 'note',\n title TEXT NOT NULL,\n body_md TEXT NOT NULL,\n tags_json TEXT NOT NULL DEFAULT '[]',\n scope TEXT NOT NULL DEFAULT 'repo', -- repo|run:<id>|story:<key>|global\n source TEXT NOT NULL DEFAULT 'user', -- user|tool|agent|import\n priority INTEGER NOT NULL DEFAULT 50,\n expires_at TEXT,\n sha256 TEXT,\n created_at TEXT NOT NULL,\n updated_at TEXT NOT NULL\n);\n\nCREATE TABLE IF NOT EXISTS running_batches (\n batch_id TEXT PRIMARY KEY,\n run_id TEXT,\n session_id TEXT,\n status TEXT NOT NULL DEFAULT 'running', -- running|completed|failed|aborted\n created_at TEXT NOT NULL,\n updated_at TEXT NOT NULL,\n FOREIGN KEY (run_id) REFERENCES runs(run_id)\n);\n\nCREATE TABLE IF NOT EXISTS workflow_metrics (\n metric_id TEXT PRIMARY KEY,\n run_id TEXT,\n stage_key TEXT,\n name TEXT NOT NULL,\n value_num REAL,\n value_text TEXT,\n created_at TEXT NOT NULL,\n FOREIGN KEY (run_id) REFERENCES runs(run_id)\n);\n\nCREATE TABLE IF NOT EXISTS template_intents (\n intent_key TEXT PRIMARY KEY,\n body_md TEXT NOT NULL,\n updated_at TEXT NOT NULL\n);\n\n-- vNext tables\n\nCREATE TABLE IF NOT EXISTS story_relations (\n parent_story_key TEXT NOT NULL,\n child_story_key TEXT NOT NULL,\n relation_type TEXT NOT NULL DEFAULT 'split',\n reason TEXT NOT NULL DEFAULT '',\n created_at TEXT NOT NULL,\n PRIMARY KEY (parent_story_key, child_story_key),\n FOREIGN KEY (parent_story_key) REFERENCES stories(story_key),\n FOREIGN KEY (child_story_key) REFERENCES stories(story_key)\n);\n\nCREATE TABLE IF NOT EXISTS continuations (\n continuation_id INTEGER PRIMARY KEY AUTOINCREMENT,\n session_id TEXT NOT NULL,\n run_id TEXT,\n directive_hash TEXT NOT NULL,\n kind TEXT NOT NULL, -- continue|stage|blocked|repair\n reason TEXT NOT NULL DEFAULT '',\n created_at TEXT NOT NULL,\n FOREIGN KEY (run_id) REFERENCES runs(run_id)\n);\n\nCREATE INDEX IF NOT EXISTS idx_continuations_session_created ON continuations(session_id, created_at DESC);\nCREATE INDEX IF NOT EXISTS idx_continuations_run_created ON continuations(run_id, created_at DESC);\n\nCREATE TABLE IF NOT EXISTS context_snapshots (\n snapshot_id TEXT PRIMARY KEY,\n run_id TEXT NOT NULL,\n stage_key TEXT NOT NULL,\n summary_md TEXT NOT NULL,\n created_at TEXT NOT NULL,\n FOREIGN KEY (run_id) REFERENCES runs(run_id)\n);\n\nCREATE INDEX IF NOT EXISTS idx_context_snapshots_run_created ON context_snapshots(run_id, created_at DESC);\n\nCREATE TABLE IF NOT EXISTS agent_sessions (\n session_id TEXT PRIMARY KEY,\n parent_session_id TEXT,\n agent_name TEXT NOT NULL,\n run_id TEXT,\n stage_key TEXT,\n status TEXT NOT NULL DEFAULT 'active',\n created_at TEXT NOT NULL,\n updated_at TEXT NOT NULL\n);\n\n-- Indexes\n\nCREATE INDEX IF NOT EXISTS idx_stories_state ON stories(state);\nCREATE INDEX IF NOT EXISTS idx_runs_story ON runs(story_key);\nCREATE INDEX IF NOT EXISTS idx_runs_status ON runs(status);\nCREATE INDEX IF NOT EXISTS idx_stage_runs_run ON stage_runs(run_id, stage_index);\nCREATE INDEX IF NOT EXISTS idx_artifacts_run_stage ON artifacts(run_id, stage_key, created_at DESC);\nCREATE INDEX IF NOT EXISTS idx_events_run ON events(run_id, created_at DESC);\nCREATE INDEX IF NOT EXISTS idx_tool_runs_run ON tool_runs(run_id, created_at DESC);\nCREATE INDEX IF NOT EXISTS idx_injects_scope_priority ON injects(scope, priority DESC, created_at DESC);\n\n-- Stronger invariants (SQLite partial indexes)\n-- Only one run may be 'running' at a time (single-repo harness by default).\nCREATE UNIQUE INDEX IF NOT EXISTS uniq_single_running_run\n ON runs(status)\n WHERE status = 'running';\n\n-- 
Only one story may be in_progress=1 at a time (pairs with single running run).\nCREATE UNIQUE INDEX IF NOT EXISTS uniq_single_in_progress_story\n ON stories(in_progress)\n WHERE in_progress = 1;\n\n";
@@ -85,6 +85,7 @@ CREATE TABLE IF NOT EXISTS stage_runs (
  stage_key TEXT NOT NULL,
  stage_index INTEGER NOT NULL,
  status TEXT NOT NULL DEFAULT 'pending', -- pending|running|completed|failed|skipped
+ created_at TEXT NOT NULL,
  subagent_type TEXT,
  subagent_session_id TEXT,
  started_at TEXT,
@@ -34,6 +34,7 @@ export type StageRunRow = {
  stage_key: StageKey;
  stage_index: number;
  status: StageStatus;
+ created_at: string;
  subagent_type: string | null;
  subagent_session_id: string | null;
  started_at: string | null;
@@ -21,6 +21,42 @@ function ensureStageMatches(run, stage_key) {
  throw new Error(`Stage mismatch: run.current_stage_key=${run.current_stage_key} but got stage_key=${stage_key}`);
  }
  }
+ function splitTasksIntoStories(db, tasks, run, now, newStoryKeys, relationReason) {
+   for (const task of tasks) {
+     const complexity = task.complexity ?? 5;
+     const subtasks = task.subtasks ?? [];
+     if (subtasks.length > 0) {
+       // Split into subtasks
+       console.log(`[Astrocode] Splitting task "${task.title}" into ${subtasks.length} subtasks`);
+       for (const subtask of subtasks) {
+         const key = insertStory(db, {
+           title: `${task.title}: ${subtask}`,
+           body_md: task.description ?? "",
+           priority: Math.max(1, 10 - complexity),
+           state: "queued",
+           epic_key: run.story_key
+         });
+         newStoryKeys.push(key);
+         console.log(`[Astrocode] Created story ${key} for subtask`);
+         db.prepare("INSERT INTO story_relations (parent_story_key, child_story_key, relation_type, reason, created_at) VALUES (?, ?, ?, ?, ?)").run(run.story_key, key, "split", relationReason, now);
+       }
+     }
+     else if (complexity > 6) {
+       // Split complex tasks
+       console.log(`[Astrocode] Splitting complex task "${task.title}" (complexity ${complexity})`);
+       const key = insertStory(db, {
+         title: task.title,
+         body_md: task.description ?? "",
+         priority: Math.max(1, 10 - complexity),
+         state: "queued",
+         epic_key: run.story_key
+       });
+       newStoryKeys.push(key);
+       console.log(`[Astrocode] Created story ${key} for complex task`);
+       db.prepare("INSERT INTO story_relations (parent_story_key, child_story_key, relation_type, reason, created_at) VALUES (?, ?, ?, ?, ?)").run(run.story_key, key, "split", relationReason, now);
+     }
+   }
+ }
  export function createAstroStageStartTool(opts) {
  const { config, db } = opts;
  return tool({
@@ -86,6 +122,13 @@ export function createAstroStageCompleteTool(opts) {
  if (parsed.astro_json.stage_key !== sk) {
    return `❌ Stage Key Mismatch: ASTRO JSON has stage_key="${parsed.astro_json.stage_key}" but expected "${sk}". Update the JSON to match the current stage.`;
  }
+ // Context validation (warnings, not errors)
+ if (parsed.astro_json.run_id && parsed.astro_json.run_id !== rid) {
+   console.warn(`[Astrocode] ⚠️ Run ID mismatch in baton: expected "${rid}", got "${parsed.astro_json.run_id}". Proceeding anyway.`);
+ }
+ if (parsed.astro_json.story_key && parsed.astro_json.story_key !== run.story_key) {
+   console.warn(`[Astrocode] ⚠️ Story key mismatch in baton: expected "${run.story_key}", got "${parsed.astro_json.story_key}". Proceeding anyway.`);
+ }
  // Evidence requirement
  const evidenceRequired = (sk === "verify" && config.workflow.evidence_required.verify) ||
    (sk === "implement" && config.workflow.evidence_required.implement);
@@ -163,46 +206,13 @@ export function createAstroStageCompleteTool(opts) {
  for (const ns of parsed.astro_json.new_stories) {
    const key = insertStory(db, { title: ns.title, body_md: ns.body_md ?? "", priority: ns.priority ?? 0, state: "queued" });
    newStoryKeys.push(key);
-   db.prepare("INSERT INTO story_relations (relation_id, parent_key, child_key, relation_type, created_at) VALUES (?, ?, ?, ?, ?)").run(newId("rel"), run.story_key, key, relation_reason, now);
+   db.prepare("INSERT INTO story_relations (parent_story_key, child_story_key, relation_type, reason, created_at) VALUES (?, ?, ?, ?, ?)").run(run.story_key, key, "split", relation_reason, now);
  }
  }
  // Automatic story splitting for complex tasks
  if (allow_new_stories && parsed.astro_json.tasks?.length) {
    console.log(`[Astrocode] Splitting ${parsed.astro_json.tasks.length} tasks into stories`);
-   for (const task of parsed.astro_json.tasks) {
-     const complexity = task.complexity ?? 5;
-     const subtasks = task.subtasks ?? [];
-     if (subtasks.length > 0) {
-       // Split into subtasks
-       console.log(`[Astrocode] Splitting task "${task.title}" into ${subtasks.length} subtasks`);
-       for (const subtask of subtasks) {
-         const key = insertStory(db, {
-           title: `${task.title}: ${subtask}`,
-           body_md: task.description ?? "",
-           priority: Math.max(1, 10 - complexity),
-           state: "queued",
-           epic_key: run.story_key
-         });
-         newStoryKeys.push(key);
-         console.log(`[Astrocode] Created story ${key} for subtask`);
-         db.prepare("INSERT INTO story_relations (relation_id, parent_key, child_key, relation_type, created_at) VALUES (?, ?, ?, ?, ?)").run(newId("rel"), run.story_key, key, "split from implement", now);
-       }
-     }
-     else if (complexity > 6) {
-       // Split complex tasks
-       console.log(`[Astrocode] Splitting complex task "${task.title}" (complexity ${complexity})`);
-       const key = insertStory(db, {
-         title: task.title,
-         body_md: task.description ?? "",
-         priority: Math.max(1, 10 - complexity),
-         state: "queued",
-         epic_key: run.story_key
-       });
-       newStoryKeys.push(key);
-       console.log(`[Astrocode] Created story ${key} for complex task`);
-       db.prepare("INSERT INTO story_relations (relation_id, parent_key, child_key, relation_type, created_at) VALUES (?, ?, ?, ?, ?)").run(newId("rel"), run.story_key, key, "split from implement", now);
-     }
-   }
+   splitTasksIntoStories(db, parsed.astro_json.tasks, run, now, newStoryKeys, "split from stage output");
  }
  // Skip spec stage if spec already exists
  if (sk === "plan" && next === "spec") {
@@ -216,35 +226,12 @@ export function createAstroStageCompleteTool(opts) {
  }
  // Split stories during implementation if tasks are identified
  if (sk === "implement" && allow_new_stories && parsed.astro_json.tasks?.length) {
-   for (const task of parsed.astro_json.tasks) {
-     const complexity = task.complexity ?? 5;
-     const subtasks = task.subtasks ?? [];
-     if (subtasks.length > 0) {
-       // Split into subtasks
-       for (const subtask of subtasks) {
-         const key = insertStory(db, {
-           title: `${task.title}: ${subtask}`,
-           body_md: task.description ?? "",
-           priority: Math.max(1, 10 - complexity),
-           state: "queued",
-           epic_key: run.story_key
-         });
-         newStoryKeys.push(key);
-         db.prepare("INSERT INTO story_relations (relation_id, parent_key, child_key, relation_type, created_at) VALUES (?, ?, ?, ?, ?)").run(newId("rel"), run.story_key, key, "split from implement", now);
-       }
-     }
-     else if (complexity > 6) {
-       // Split complex tasks
-       const key = insertStory(db, {
-         title: task.title,
-         body_md: task.description ?? "",
-         priority: Math.max(1, 10 - complexity),
-         state: "queued",
-         epic_key: run.story_key
-       });
-       newStoryKeys.push(key);
-       db.prepare("INSERT INTO story_relations (relation_id, parent_key, child_key, relation_type, created_at) VALUES (?, ?, ?, ?, ?)").run(newId("rel"), run.story_key, key, "split from implement", now);
-     }
+   splitTasksIntoStories(db, parsed.astro_json.tasks, run, now, newStoryKeys, "split from implement");
+ }
+ // Validate blocked status includes questions
+ if (parsed.astro_json.status === "blocked") {
+   if (!parsed.astro_json.questions || parsed.astro_json.questions.length === 0) {
+     return { ok: false, next_stage: null, new_stories: newStoryKeys, error: "Blocked status requires questions[] array to be non-empty. Add clarifying questions for user input." };
  }
  }
  if (parsed.astro_json.status !== "ok") {
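Illustration of the new blocked-status rule: based on the fields that appear elsewhere in this diff (schema_version, stage_key, status, summary, questions), a baton whose status is "blocked" now needs a non-empty questions array to pass the check above. The exact required field set is defined by AstroJsonSchema, which is not shown here, so treat this as a hedged sketch rather than the canonical schema:

    // Hypothetical ASTRO JSON payload that would satisfy the blocked-status validation.
    const blockedExample = {
      schema_version: 1,
      stage_key: "implement",
      status: "blocked",
      summary: "Implementation paused pending a decision on the storage adapter.",
      questions: [
        "Should the created_at backfill reuse started_at, or is an empty string acceptable?"
      ]
    };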
@@ -17,7 +17,12 @@ const STAGE_TO_AGENT_MAP = {
  verify: "Verify",
  close: "Close"
  };
- function resolveAgentName(stageKey) {
+ function resolveAgentName(stageKey, config) {
+   // Use configurable agent names from config, fallback to hardcoded map, then General
+   const agentNames = config.agents?.stage_agent_names;
+   if (agentNames && agentNames[stageKey]) {
+     return agentNames[stageKey];
+   }
    return STAGE_TO_AGENT_MAP[stageKey] || "General";
  }
  function stageGoal(stage, cfg) {
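For reference, the reworked resolveAgentName now checks config-supplied overrides before the built-in map and the "General" fallback. A hedged sketch of what such a config fragment might look like, with key names taken from this diff and the surrounding structure assumed:

    // Hypothetical plugin config fragment; only agents.stage_agent_names and agents.orchestrator_name appear in this diff.
    const pluginConfig = {
      agents: {
        orchestrator_name: "Astro",
        stage_agent_names: {
          implement: "Implement",
          verify: "Verify"
          // Stages omitted here fall back to STAGE_TO_AGENT_MAP, then "General".
        }
      }
    };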
@@ -124,16 +129,23 @@ export function createAstroWorkflowProceedTool(opts) {
  const run = db.prepare("SELECT * FROM runs WHERE run_id=?").get(active.run_id);
  const story = db.prepare("SELECT * FROM stories WHERE story_key=?").get(run.story_key);
  // Mark stage started + set subagent_type to the stage agent.
- let agentName = resolveAgentName(next.stage_key);
+ let agentName = resolveAgentName(next.stage_key, config);
  console.log(`[Astrocode] Resolving agent for ${next.stage_key}: ${agentName}`);
  console.log(`[Astrocode] Available agents:`, Object.keys(config.agent || {}));
- // Validate agent availability
+ // Validate agent availability with fallback chain
  const systemConfig = config;
  if (!systemConfig.agent || !systemConfig.agent[agentName]) {
    console.warn(`[Astrocode] Agent ${agentName} not found in config. Falling back to General.`);
    console.warn(`[Astrocode] Agent check: config.agent exists: ${!!systemConfig.agent}, agentName in config: ${systemConfig.agent ? agentName in systemConfig.agent : 'N/A'}`);
-   // Fallback to General
    agentName = "General";
+   // Second fallback to orchestrator if General also unavailable
+   if (!systemConfig.agent || !systemConfig.agent[agentName]) {
+     console.warn(`[Astrocode] General agent also unavailable. Falling back to orchestrator.`);
+     agentName = config.agents?.orchestrator_name || "Astro";
+     if (!systemConfig.agent || !systemConfig.agent[agentName]) {
+       throw new Error(`Critical: No agents available for delegation. Primary: ${resolveAgentName(next.stage_key, config)}, General, Orchestrator: ${agentName}`);
+     }
+   }
  }
  console.log(`[Astrocode] Delegating stage ${next.stage_key} to agent: ${agentName}`);
  withTx(db, () => {
@@ -1,5 +1,5 @@
  import { clampLines, normalizeNewlines, stripCodeFences } from "../shared/text";
- import { z } from "zod";
+ import { z, ZodError } from "zod";
  export const ASTRO_JSON_BEGIN = "<!-- ASTRO_JSON_BEGIN -->";
  export const ASTRO_JSON_END = "<!-- ASTRO_JSON_END -->";
  export const StageKeySchema = z.enum(["frame", "plan", "spec", "implement", "review", "verify", "close"]);
@@ -59,28 +59,22 @@ export function parseStageOutputText(text) {
  return { baton_md: baton, astro_json: astroJson, astro_json_raw: cleaned, error: null };
  }
  catch (e) {
-   // Fallback: create minimal valid JSON from the content
-   console.warn(`[Astrocode] ASTRO JSON parse failed: ${String(e)}, creating fallback`);
-   const fallbackJson = {
-     schema_version: 1,
-     stage_key: "frame", // Will be overridden by tool param
-     status: "ok",
-     summary: `Stage completed. Content: ${jsonRaw.slice(0, 200)}...`,
-     decisions: [],
-     next_actions: [],
-     files: [],
-     evidence: [],
-     tasks: [],
-     new_stories: [],
-     questions: [],
-     metrics: {}
-   };
-   return {
-     baton_md: norm, // Use full text as baton
-     astro_json: fallbackJson,
-     astro_json_raw: JSON.stringify(fallbackJson),
-     error: null // No error, fallback succeeded
-   };
+   if (e instanceof ZodError) {
+     return {
+       baton_md: baton,
+       astro_json: null,
+       astro_json_raw: jsonRaw,
+       error: `Schema validation failed: ${e.message}. Ensure JSON conforms to ASTRO schema with required fields like stage_key, status, etc.`,
+     };
+   }
+   else {
+     return {
+       baton_md: norm,
+       astro_json: null,
+       astro_json_raw: null,
+       error: `JSON parsing failed: ${String(e)}. Ensure valid JSON syntax between ${ASTRO_JSON_BEGIN} and ${ASTRO_JSON_END} markers.`,
+     };
+   }
  }
  }
  export function buildBatonSummary(opts) {
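With the fabricated-fallback branch removed, a parse failure now surfaces as astro_json: null plus an error string that distinguishes schema rejection from invalid JSON syntax; the stage output must still wrap its JSON between ASTRO_JSON_BEGIN and ASTRO_JSON_END. A minimal caller sketch (the import path and the handleStageOutput helper are illustrative assumptions, not exports documented by the package):

    import { parseStageOutputText } from "astrocode-workflow"; // assumed entry point, for illustration only

    function handleStageOutput(text: string): void {
      const parsed = parseStageOutputText(text);
      if (!parsed.astro_json) {
        // Covers both Zod schema failures and raw JSON syntax failures; parsed.error says which.
        console.error(`Stage output rejected: ${parsed.error}`);
        return;
      }
      console.log(`Stage ${parsed.astro_json.stage_key} finished with status ${parsed.astro_json.status}`);
    }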
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "astrocode-workflow",
- "version": "0.1.9",
+ "version": "0.1.12",
  "type": "module",
  "main": "dist/index.js",
  "types": "dist/index.d.ts",
@@ -18,7 +18,7 @@
  "dependencies": {
  "@opencode-ai/plugin": "^1.1.19",
  "@opencode-ai/sdk": "^1.1.19",
- "astrocode-workflow": "^0.1.3",
+ "astrocode-workflow": "^0.1.8",
  "jsonc-parser": "^3.2.0",
  "zod": "4.1.8"
  },
@@ -301,6 +301,20 @@ export function createAstroAgents(opts: {
  "utility"
  );

+ // Fallback general agent for delegation failures
+ agents["General"] = mk(
+   "General",
+   {
+     description: "General-purpose fallback agent for delegation.",
+     mode: "subagent",
+     hidden: true,
+     temperature: 0.1,
+     prompt: BASE_STAGE_PROMPT,
+     permission: stageReadOnlyPermissions(),
+   },
+   "utility"
+ );
+
  // Allow user config to disable certain agents
  for (const disabled of pluginConfig.disabled_agents) {
    delete agents[disabled];
@@ -62,7 +62,8 @@ export function assertInsideAstro(repoRoot: string, filePath: string) {
  const abs = path.resolve(filePath);
  const astroRoot = path.resolve(path.join(repoRoot, ".astro"));
  if (!abs.startsWith(astroRoot + path.sep) && abs !== astroRoot) {
-   throw new Error(`Refusing to write outside .astro: ${filePath}`);
+   const relPath = path.relative(repoRoot, filePath);
+   throw new Error(`Refusing to write outside .astro: ${filePath} (relative: ${relPath}, astroRoot: ${astroRoot})`);
  }
  if (!abs.startsWith(absRepo + path.sep) && abs !== absRepo) {
    throw new Error(`Refusing to write outside repo root: ${filePath}`);
package/src/state/db.ts CHANGED
@@ -68,6 +68,19 @@ export function ensureSchema(db: SqliteDb, opts?: { allowAutoMigrate?: boolean;
  try {
    db.exec(SCHEMA_SQL);

+   // Migrations for existing databases
+   // Add created_at to stage_runs if missing (introduced in schema version 2)
+   try {
+     const columns = db.prepare("PRAGMA table_info(stage_runs)").all() as { name: string }[];
+     const hasCreatedAt = columns.some(col => col.name === 'created_at');
+     if (!hasCreatedAt) {
+       db.exec("ALTER TABLE stage_runs ADD COLUMN created_at TEXT NOT NULL DEFAULT ''");
+       info("[Astrocode] Added created_at column to stage_runs table");
+     }
+   } catch (e) {
+     // Column might already exist or table doesn't exist, ignore
+   }
+
    const row = db.prepare("SELECT schema_version FROM repo_state WHERE id = 1").get() as { schema_version?: number } | undefined;

    if (!row) {
@@ -87,6 +87,7 @@ CREATE TABLE IF NOT EXISTS stage_runs (
  stage_key TEXT NOT NULL,
  stage_index INTEGER NOT NULL,
  status TEXT NOT NULL DEFAULT 'pending', -- pending|running|completed|failed|skipped
+ created_at TEXT NOT NULL,
  subagent_type TEXT,
  subagent_session_id TEXT,
  started_at TEXT,
@@ -38,6 +38,7 @@ export type StageRunRow = {
  stage_key: StageKey;
  stage_index: number;
  status: StageStatus;
+ created_at: string;
  subagent_type: string | null;
  subagent_session_id: string | null;
  started_at: string | null;
@@ -26,6 +26,65 @@ function ensureStageMatches(run: any, stage_key: StageKey) {
  }
  }

+ function splitTasksIntoStories(
+   db: SqliteDb,
+   tasks: any[],
+   run: any,
+   now: string,
+   newStoryKeys: string[],
+   relationReason: string
+ ) {
+   for (const task of tasks) {
+     const complexity = task.complexity ?? 5;
+     const subtasks = task.subtasks ?? [];
+     if (subtasks.length > 0) {
+       // Split into subtasks
+       console.log(`[Astrocode] Splitting task "${task.title}" into ${subtasks.length} subtasks`);
+       for (const subtask of subtasks) {
+         const key = insertStory(db, {
+           title: `${task.title}: ${subtask}`,
+           body_md: task.description ?? "",
+           priority: Math.max(1, 10 - complexity),
+           state: "queued",
+           epic_key: run.story_key
+         });
+         newStoryKeys.push(key);
+         console.log(`[Astrocode] Created story ${key} for subtask`);
+         db.prepare(
+           "INSERT INTO story_relations (parent_story_key, child_story_key, relation_type, reason, created_at) VALUES (?, ?, ?, ?, ?)"
+         ).run(
+           run.story_key,
+           key,
+           "split",
+           relationReason,
+           now
+         );
+       }
+     } else if (complexity > 6) {
+       // Split complex tasks
+       console.log(`[Astrocode] Splitting complex task "${task.title}" (complexity ${complexity})`);
+       const key = insertStory(db, {
+         title: task.title,
+         body_md: task.description ?? "",
+         priority: Math.max(1, 10 - complexity),
+         state: "queued",
+         epic_key: run.story_key
+       });
+       newStoryKeys.push(key);
+       console.log(`[Astrocode] Created story ${key} for complex task`);
+       db.prepare(
+         "INSERT INTO story_relations (parent_story_key, child_story_key, relation_type, reason, created_at) VALUES (?, ?, ?, ?, ?)"
+       ).run(
+         run.story_key,
+         key,
+         "split",
+         relationReason,
+         now
+       );
+     }
+   }
+ }
+
  export function createAstroStageStartTool(opts: { ctx: any; config: AstrocodeConfig; db: SqliteDb }): ToolDefinition {
  const { config, db } = opts;

@@ -97,9 +156,17 @@ export function createAstroStageCompleteTool(opts: { ctx: any; config: Astrocode
  return `❌ JSON Parse Error: ${parsed.error ?? "ASTRO JSON missing"}. Ensure output includes <!-- ASTRO_JSON_BEGIN -->{...}<!-- ASTRO_JSON_END --> markers with valid JSON.`;
  }

- if (parsed.astro_json.stage_key !== sk) {
-   return `❌ Stage Key Mismatch: ASTRO JSON has stage_key="${parsed.astro_json.stage_key}" but expected "${sk}". Update the JSON to match the current stage.`;
- }
+ if (parsed.astro_json.stage_key !== sk) {
+   return `❌ Stage Key Mismatch: ASTRO JSON has stage_key="${parsed.astro_json.stage_key}" but expected "${sk}". Update the JSON to match the current stage.`;
+ }
+
+ // Context validation (warnings, not errors)
+ if (parsed.astro_json.run_id && parsed.astro_json.run_id !== rid) {
+   console.warn(`[Astrocode] ⚠️ Run ID mismatch in baton: expected "${rid}", got "${parsed.astro_json.run_id}". Proceeding anyway.`);
+ }
+ if (parsed.astro_json.story_key && parsed.astro_json.story_key !== run.story_key) {
+   console.warn(`[Astrocode] ⚠️ Story key mismatch in baton: expected "${run.story_key}", got "${parsed.astro_json.story_key}". Proceeding anyway.`);
+ }

  // Evidence requirement
  const evidenceRequired =
@@ -211,11 +278,11 @@ export function createAstroStageCompleteTool(opts: { ctx: any; config: Astrocode
  const key = insertStory(db, { title: ns.title, body_md: ns.body_md ?? "", priority: ns.priority ?? 0, state: "queued" });
  newStoryKeys.push(key);
  db.prepare(
-   "INSERT INTO story_relations (relation_id, parent_key, child_key, relation_type, created_at) VALUES (?, ?, ?, ?, ?)"
+   "INSERT INTO story_relations (parent_story_key, child_story_key, relation_type, reason, created_at) VALUES (?, ?, ?, ?, ?)"
  ).run(
-   newId("rel"),
    run.story_key,
    key,
+   "split",
    relation_reason,
    now
  );
@@ -225,55 +292,7 @@ export function createAstroStageCompleteTool(opts: { ctx: any; config: Astrocode
  // Automatic story splitting for complex tasks
  if (allow_new_stories && parsed.astro_json.tasks?.length) {
    console.log(`[Astrocode] Splitting ${parsed.astro_json.tasks.length} tasks into stories`);
-   for (const task of parsed.astro_json.tasks) {
-     const complexity = task.complexity ?? 5;
-     const subtasks = task.subtasks ?? [];
-     if (subtasks.length > 0) {
-       // Split into subtasks
-       console.log(`[Astrocode] Splitting task "${task.title}" into ${subtasks.length} subtasks`);
-       for (const subtask of subtasks) {
-         const key = insertStory(db, {
-           title: `${task.title}: ${subtask}`,
-           body_md: task.description ?? "",
-           priority: Math.max(1, 10 - complexity),
-           state: "queued",
-           epic_key: run.story_key
-         });
-         newStoryKeys.push(key);
-         console.log(`[Astrocode] Created story ${key} for subtask`);
-         db.prepare(
-           "INSERT INTO story_relations (relation_id, parent_key, child_key, relation_type, created_at) VALUES (?, ?, ?, ?, ?)"
-         ).run(
-           newId("rel"),
-           run.story_key,
-           key,
-           "split from implement",
-           now
-         );
-       }
-     } else if (complexity > 6) {
-       // Split complex tasks
-       console.log(`[Astrocode] Splitting complex task "${task.title}" (complexity ${complexity})`);
-       const key = insertStory(db, {
-         title: task.title,
-         body_md: task.description ?? "",
-         priority: Math.max(1, 10 - complexity),
-         state: "queued",
-         epic_key: run.story_key
-       });
-       newStoryKeys.push(key);
-       console.log(`[Astrocode] Created story ${key} for complex task`);
-       db.prepare(
-         "INSERT INTO story_relations (relation_id, parent_key, child_key, relation_type, created_at) VALUES (?, ?, ?, ?, ?)"
-       ).run(
-         newId("rel"),
-         run.story_key,
-         key,
-         "split from implement",
-         now
-       );
-     }
-   }
+   splitTasksIntoStories(db, parsed.astro_json.tasks, run, now, newStoryKeys, "split from stage output");
  }

  // Skip spec stage if spec already exists
@@ -289,50 +308,13 @@ export function createAstroStageCompleteTool(opts: { ctx: any; config: Astrocode

  // Split stories during implementation if tasks are identified
  if (sk === "implement" && allow_new_stories && parsed.astro_json.tasks?.length) {
-   for (const task of parsed.astro_json.tasks) {
-     const complexity = task.complexity ?? 5;
-     const subtasks = task.subtasks ?? [];
-     if (subtasks.length > 0) {
-       // Split into subtasks
-       for (const subtask of subtasks) {
-         const key = insertStory(db, {
-           title: `${task.title}: ${subtask}`,
-           body_md: task.description ?? "",
-           priority: Math.max(1, 10 - complexity),
-           state: "queued",
-           epic_key: run.story_key
-         });
-         newStoryKeys.push(key);
-         db.prepare(
-           "INSERT INTO story_relations (relation_id, parent_key, child_key, relation_type, created_at) VALUES (?, ?, ?, ?, ?)"
-         ).run(
-           newId("rel"),
-           run.story_key,
-           key,
-           "split from implement",
-           now
-         );
-       }
-     } else if (complexity > 6) {
-       // Split complex tasks
-       const key = insertStory(db, {
-         title: task.title,
-         body_md: task.description ?? "",
-         priority: Math.max(1, 10 - complexity),
-         state: "queued",
-         epic_key: run.story_key
-       });
-       newStoryKeys.push(key);
-       db.prepare(
-         "INSERT INTO story_relations (relation_id, parent_key, child_key, relation_type, created_at) VALUES (?, ?, ?, ?, ?)"
-       ).run(
-         newId("rel"),
-         run.story_key,
-         key,
-         "split from implement",
-         now
-       );
-     }
+   splitTasksIntoStories(db, parsed.astro_json.tasks, run, now, newStoryKeys, "split from implement");
+ }
+
+ // Validate blocked status includes questions
+ if (parsed.astro_json.status === "blocked") {
+   if (!parsed.astro_json.questions || parsed.astro_json.questions.length === 0) {
+     return { ok: false as const, next_stage: null as StageKey | null, new_stories: newStoryKeys, error: "Blocked status requires questions[] array to be non-empty. Add clarifying questions for user input." };
  }
  }

@@ -22,7 +22,12 @@ const STAGE_TO_AGENT_MAP: Record<string, string> = {
  close: "Close"
  };

- function resolveAgentName(stageKey: string): string {
+ function resolveAgentName(stageKey: StageKey, config: AstrocodeConfig): string {
+   // Use configurable agent names from config, fallback to hardcoded map, then General
+   const agentNames = config.agents?.stage_agent_names;
+   if (agentNames && agentNames[stageKey]) {
+     return agentNames[stageKey];
+   }
    return STAGE_TO_AGENT_MAP[stageKey] || "General";
  }

@@ -151,18 +156,25 @@ export function createAstroWorkflowProceedTool(opts: { ctx: any; config: Astroco
  const run = db.prepare("SELECT * FROM runs WHERE run_id=?").get(active.run_id);
  const story = db.prepare("SELECT * FROM stories WHERE story_key=?").get(run.story_key) as any;

  // Mark stage started + set subagent_type to the stage agent.
- let agentName = resolveAgentName(next.stage_key);
+ let agentName = resolveAgentName(next.stage_key, config);

  console.log(`[Astrocode] Resolving agent for ${next.stage_key}: ${agentName}`);
  console.log(`[Astrocode] Available agents:`, Object.keys((config as any).agent || {}));

- // Validate agent availability
+ // Validate agent availability with fallback chain
  const systemConfig = config as any;
  if (!systemConfig.agent || !systemConfig.agent[agentName]) {
    console.warn(`[Astrocode] Agent ${agentName} not found in config. Falling back to General.`);
    console.warn(`[Astrocode] Agent check: config.agent exists: ${!!systemConfig.agent}, agentName in config: ${systemConfig.agent ? agentName in systemConfig.agent : 'N/A'}`);
-   // Fallback to General
    agentName = "General";
+   // Second fallback to orchestrator if General also unavailable
+   if (!systemConfig.agent || !systemConfig.agent[agentName]) {
+     console.warn(`[Astrocode] General agent also unavailable. Falling back to orchestrator.`);
+     agentName = config.agents?.orchestrator_name || "Astro";
+     if (!systemConfig.agent || !systemConfig.agent[agentName]) {
+       throw new Error(`Critical: No agents available for delegation. Primary: ${resolveAgentName(next.stage_key, config)}, General, Orchestrator: ${agentName}`);
+     }
+   }
  }

  console.log(`[Astrocode] Delegating stage ${next.stage_key} to agent: ${agentName}`);
@@ -1,7 +1,7 @@
  import { tool } from "@opencode-ai/plugin/tool";
  import type { AstrocodeConfig } from "../config/schema";
  import { clampLines, normalizeNewlines, stripCodeFences } from "../shared/text";
- import { z } from "zod";
+ import { z, ZodError } from "zod";

  export const ASTRO_JSON_BEGIN = "<!-- ASTRO_JSON_BEGIN -->";
  export const ASTRO_JSON_END = "<!-- ASTRO_JSON_END -->";
@@ -95,28 +95,21 @@ export function parseStageOutputText(text: string): ParsedStageOutput {
  const astroJson = AstroJsonSchema.parse(parsed);
  return { baton_md: baton, astro_json: astroJson, astro_json_raw: cleaned, error: null };
  } catch (e) {
-   // Fallback: create minimal valid JSON from the content
-   console.warn(`[Astrocode] ASTRO JSON parse failed: ${String(e)}, creating fallback`);
-   const fallbackJson: AstroJson = {
-     schema_version: 1,
-     stage_key: "frame", // Will be overridden by tool param
-     status: "ok",
-     summary: `Stage completed. Content: ${jsonRaw.slice(0, 200)}...`,
-     decisions: [],
-     next_actions: [],
-     files: [],
-     evidence: [],
-     tasks: [],
-     new_stories: [],
-     questions: [],
-     metrics: {}
-   };
-   return {
-     baton_md: norm, // Use full text as baton
-     astro_json: fallbackJson,
-     astro_json_raw: JSON.stringify(fallbackJson),
-     error: null // No error, fallback succeeded
-   };
+   if (e instanceof ZodError) {
+     return {
+       baton_md: baton,
+       astro_json: null,
+       astro_json_raw: jsonRaw,
+       error: `Schema validation failed: ${e.message}. Ensure JSON conforms to ASTRO schema with required fields like stage_key, status, etc.`,
+     };
+   } else {
+     return {
+       baton_md: norm,
+       astro_json: null,
+       astro_json_raw: null,
+       error: `JSON parsing failed: ${String(e)}. Ensure valid JSON syntax between ${ASTRO_JSON_BEGIN} and ${ASTRO_JSON_END} markers.`,
+     };
+   }
  }
  }