astrocode-workflow 0.0.2 → 0.1.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,2 +1,2 @@
- export declare const BASE_ORCH_PROMPT = "You are Astro (Orchestrator) for Astrocode.\n\nMission:\n- Advance a deterministic pipeline: frame \u2192 plan \u2192 spec \u2192 implement \u2192 review \u2192 verify \u2192 close.\n- The SQLite DB is the source of truth. Prefer tools over prose.\n- Never narrate what prompts you received.\n- Keep outputs short; store large outputs as artifacts and reference paths.\n\nOperating rules:\n- Prefer calling astro_workflow_proceed (step/loop) and astro_status.\n- Delegate stage work only to the stage subagent matching the current stage.\n- If a stage subagent returns status=blocked, inject the BLOCKED directive and stop.\n- Never delegate from subagents (enforced by permissions).\n";
+ export declare const BASE_ORCH_PROMPT = "You are Astro (Orchestrator) for Astrocode.\n\nMission:\n- Advance a deterministic pipeline: frame \u2192 plan \u2192 spec \u2192 implement \u2192 review \u2192 verify \u2192 close.\n- The SQLite DB is the source of truth. Prefer tools over prose.\n- Never narrate what prompts you received.\n- Keep outputs short; store large outputs as artifacts and reference paths.\n\nOperating rules:\n- Only start new runs when the user explicitly requests implementation, workflow management, or story processing.\n- Answer questions directly when possible without starting workflows.\n- Prefer calling astro_workflow_proceed (step/loop) and astro_status only when actively managing a workflow.\n- Delegate stage work only to the stage subagent matching the current stage.\n- If a stage subagent returns status=blocked, inject the BLOCKED directive and stop.\n- Never delegate from subagents (enforced by permissions).\n- Be discretionary: assess if the user's request requires workflow initiation or just information.\n";
  export declare const BASE_STAGE_PROMPT = "You are an Astro stage subagent.\n\nFollow the latest [SYSTEM DIRECTIVE: ASTROCODE \u2014 STAGE_*] you receive.\n\nOutput exactly:\n1) Baton markdown (short, structured)\n2) Valid ASTRO JSON between markers:\n<!-- ASTRO_JSON_BEGIN -->\n{...}\n<!-- ASTRO_JSON_END -->\n\nDo not narrate. If blocked, ask exactly ONE question and stop.\n";
@@ -7,10 +7,13 @@ Mission:
  - Keep outputs short; store large outputs as artifacts and reference paths.

  Operating rules:
- - Prefer calling astro_workflow_proceed (step/loop) and astro_status.
+ - Only start new runs when the user explicitly requests implementation, workflow management, or story processing.
+ - Answer questions directly when possible without starting workflows.
+ - Prefer calling astro_workflow_proceed (step/loop) and astro_status only when actively managing a workflow.
  - Delegate stage work only to the stage subagent matching the current stage.
  - If a stage subagent returns status=blocked, inject the BLOCKED directive and stop.
  - Never delegate from subagents (enforced by permissions).
+ - Be discretionary: assess if the user's request requires workflow initiation or just information.
  `;

  export const BASE_STAGE_PROMPT = `You are an Astro stage subagent.

@@ -106,8 +106,8 @@ export function createAstroAgents(opts) {
  verify: "AstroVerify",
  close: "AstroClose"
  },
- librarian_name: "AstroLibrarian",
- explore_name: "AstroExplore",
+ librarian_name: "Librarian",
+ explore_name: "Explore",
  agent_variant_overrides: {}
  };
  }
@@ -68,6 +68,7 @@ export declare const AstrocodeConfigSchema: z.ZodDefault<z.ZodObject<{
  loop_max_steps_hard_cap: z.ZodOptional<z.ZodDefault<z.ZodNumber>>;
  plan_max_tasks: z.ZodOptional<z.ZodDefault<z.ZodNumber>>;
  plan_max_lines: z.ZodOptional<z.ZodDefault<z.ZodNumber>>;
+ baton_summary_max_lines: z.ZodOptional<z.ZodDefault<z.ZodNumber>>;
  forbid_prompt_narration: z.ZodOptional<z.ZodDefault<z.ZodBoolean>>;
  single_active_run_per_repo: z.ZodOptional<z.ZodDefault<z.ZodBoolean>>;
  lock_timeout_ms: z.ZodOptional<z.ZodDefault<z.ZodNumber>>;
@@ -97,6 +97,7 @@ const WorkflowSchema = z.object({
  loop_max_steps_hard_cap: z.number().int().positive().default(200),
  plan_max_tasks: z.number().int().positive().default(500),
  plan_max_lines: z.number().int().positive().default(2000),
+ baton_summary_max_lines: z.number().int().positive().default(20),
  forbid_prompt_narration: z.boolean().default(true),
  single_active_run_per_repo: z.boolean().default(true),
  lock_timeout_ms: z.number().int().positive().default(4000),
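
The new baton_summary_max_lines option is wired through zod's .default(20), so omitting the key yields 20 after parsing. A minimal sketch of that default behavior, using a trimmed-down schema that only mirrors two of the fields above (the WorkflowSketch name is illustrative, not part of the package):

```ts
import { z } from "zod";

// Illustrative subset of the workflow schema shown in the hunk above.
const WorkflowSketch = z.object({
  plan_max_lines: z.number().int().positive().default(2000),
  baton_summary_max_lines: z.number().int().positive().default(20),
});

// Keys omitted from the input fall back to their declared defaults.
const cfg = WorkflowSketch.parse({});
console.log(cfg.baton_summary_max_lines); // 20

// An explicit value overrides the default.
const custom = WorkflowSketch.parse({ baton_summary_max_lines: 5 });
console.log(custom.baton_summary_max_lines); // 5
```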
@@ -138,8 +139,8 @@ const AgentsSchema = z.object({
  })
  .partial()
  .default({}),
- librarian_name: z.string().default("Astro — Librarian"),
- explore_name: z.string().default("Astro — Explore"),
+ librarian_name: z.string().default("Librarian"),
+ explore_name: z.string().default("Explore"),
  agent_variant_overrides: z
  .record(z.string(), z.object({
  variant: z.string().optional(),
@@ -1,4 +1,5 @@
  import path from "node:path";
+ import fs from "node:fs";
  import { tool } from "@opencode-ai/plugin/tool";
  import { withTx } from "../state/db";
  import { buildBatonSummary, parseStageOutputText } from "../workflow/baton";
@@ -94,7 +95,7 @@ export function createAstroStageCompleteTool(opts) {
  const batonSummary = buildBatonSummary({ config, stage_key: sk, astro_json: parsed.astro_json, baton_md: parsed.baton_md });
  const now = nowISO();
  const pipeline = JSON.parse(run.pipeline_stages_json ?? "[]");
- const next = nextStageKey(pipeline, sk);
+ let next = nextStageKey(pipeline, sk);
  const stageDirRel = toPosix(path.join(".astro", "runs", rid, sk));
  const batonRel = toPosix(path.join(stageDirRel, config.artifacts.baton_filename));
  const summaryRel = toPosix(path.join(stageDirRel, config.artifacts.baton_summary_filename));
@@ -198,6 +199,49 @@ export function createAstroStageCompleteTool(opts) {
  }
  }
  }
+ // Skip spec stage if spec already exists
+ if (sk === "plan" && next === "spec") {
+ const specPath = path.join(repoRoot, ".astro", "spec.md");
+ if (fs.existsSync(specPath) && fs.statSync(specPath).size > 100) {
+ // Skip spec
+ db.prepare("INSERT INTO stage_runs (stage_run_id, run_id, stage_key, status, created_at, updated_at) VALUES (?, ?, ?, ?, ?, ?)")
+ .run(newId("stage"), rid, "spec", "skipped", now, now);
+ next = "implement";
+ }
+ }
+ // Split stories during implementation if tasks are identified
+ if (sk === "implement" && allow_new_stories && parsed.astro_json.tasks?.length) {
+ for (const task of parsed.astro_json.tasks) {
+ const complexity = task.complexity ?? 5;
+ const subtasks = task.subtasks ?? [];
+ if (subtasks.length > 0) {
+ // Split into subtasks
+ for (const subtask of subtasks) {
+ const key = insertStory(db, {
+ title: `${task.title}: ${subtask}`,
+ body_md: task.description ?? "",
+ priority: Math.max(1, 10 - complexity),
+ state: "queued",
+ epic_key: run.story_key
+ });
+ newStoryKeys.push(key);
+ db.prepare("INSERT INTO story_relations (relation_id, parent_key, child_key, relation_type, created_at) VALUES (?, ?, ?, ?, ?)").run(newId("rel"), run.story_key, key, "split from implement", now);
+ }
+ }
+ else if (complexity > 6) {
+ // Split complex tasks
+ const key = insertStory(db, {
+ title: task.title,
+ body_md: task.description ?? "",
+ priority: Math.max(1, 10 - complexity),
+ state: "queued",
+ epic_key: run.story_key
+ });
+ newStoryKeys.push(key);
+ db.prepare("INSERT INTO story_relations (relation_id, parent_key, child_key, relation_type, created_at) VALUES (?, ?, ?, ?, ?)").run(newId("rel"), run.story_key, key, "split from implement", now);
+ }
+ }
+ }
  if (parsed.astro_json.status !== "ok") {
  const err = parsed.astro_json.status === "blocked"
  ? `blocked: ${(parsed.astro_json.questions?.[0] ?? "needs input")}`
@@ -13,11 +13,19 @@ export function createAstroStoryQueueTool(opts) {
  priority: tool.schema.number().int().default(0),
  },
  execute: async ({ title, body_md, epic_key, priority }) => {
- const now = nowISO();
+ // If the story seems like a large implementation, convert to planning story
+ const isLargeImplementation = (title.toLowerCase().includes('implement') || body_md?.toLowerCase().includes('implement')) &&
+ (body_md?.length || 0) > 100;
+ let finalTitle = title;
+ let finalBody = body_md;
+ if (isLargeImplementation) {
+ finalTitle = `Plan and decompose: ${title}`;
+ finalBody = `Analyze the requirements in the provided spec and break down "${title}" into 50-200 detailed, granular implementation stories. Each story should be focused on a specific, implementable task with clear acceptance criteria.\n\nOriginal description: ${body_md}`;
+ }
  const story_key = withTx(db, () => {
- return insertStory(db, { title, body_md, epic_key: epic_key ?? null, priority: priority ?? 0, state: 'queued' });
+ return insertStory(db, { title: finalTitle, body_md: finalBody, epic_key: epic_key ?? null, priority: priority ?? 0, state: 'queued' });
  });
- return `✅ Queued story ${story_key}: ${title}`;
+ return `✅ Queued story ${story_key}: ${finalTitle}`;
  },
  });
  }
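
The queue tool now rewrites large implementation requests into planning stories before insertion: a story qualifies when its title or body mentions "implement" and the body exceeds 100 characters. A minimal standalone sketch of that heuristic (the toPlanningStory helper and the shortened body text are illustrative, not part of the package):

```ts
// Illustrative sketch of the conversion heuristic used in the hunk above.
function toPlanningStory(title: string, body_md?: string): { title: string; body_md?: string } {
  const mentionsImplement =
    title.toLowerCase().includes("implement") ||
    (body_md?.toLowerCase().includes("implement") ?? false);
  const isLargeImplementation = mentionsImplement && (body_md?.length || 0) > 100;

  if (!isLargeImplementation) return { title, body_md };
  return {
    title: `Plan and decompose: ${title}`,
    body_md: `Analyze the requirements and break down "${title}" into granular implementation stories.\n\nOriginal description: ${body_md}`,
  };
}

// A short request is queued as-is; a long "implement" request becomes a planning story.
console.log(toPlanningStory("Fix typo", "Small change").title);            // "Fix typo"
console.log(toPlanningStory("Implement auth", "x".repeat(150)).title);     // "Plan and decompose: Implement auth"
```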
@@ -12,7 +12,7 @@ function stageGoal(stage, cfg) {
  case "frame":
  return "Define scope, constraints, and an unambiguous Definition of Done.";
  case "plan":
- return `Break down the work into 10-500 detailed tasks with subtasks, estimating complexity (1-10) for each. Focus on granular, implementable units.`;
+ return `Create 50-200 detailed implementation stories, each focused on a specific, implementable task. Break down every component into separate stories with clear acceptance criteria.`;
  case "spec":
  return "Produce minimal spec/contract: interfaces, invariants, acceptance checks.";
  case "implement":
@@ -32,7 +32,7 @@ function stageConstraints(stage, cfg) {
  "If blocked: ask exactly ONE question and stop.",
  ];
  if (stage === "plan") {
- common.push(`Aim for 10-500 tasks; no hard upper limit but prioritize granularity.`);
+ common.push(`Create 50-200 stories; each story must be implementable in 2-8 hours with clear acceptance criteria.`);
  }
  if (stage === "verify" && cfg.workflow.evidence_required.verify) {
  common.push("Evidence required: ASTRO JSON must include evidence[] paths.");
package/dist/ui/inject.js CHANGED
@@ -1,9 +1,20 @@
+ let isInjecting = false;
  export async function injectChatPrompt(opts) {
  const { ctx, sessionId, text } = opts;
- await ctx.client.session.prompt({
- path: { id: sessionId },
- body: {
- parts: [{ type: "text", text }],
- },
- });
+ // Skip if already injecting (max 1 at a time, no queuing)
+ if (isInjecting) {
+ return;
+ }
+ isInjecting = true;
+ try {
+ await ctx.client.session.prompt({
+ path: { id: sessionId },
+ body: {
+ parts: [{ type: "text", text }],
+ },
+ });
+ }
+ finally {
+ isInjecting = false;
+ }
  }
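
injectChatPrompt now drops overlapping calls via a module-level isInjecting flag instead of queuing them, releasing the flag in a finally block. A minimal sketch of the same drop-if-busy pattern with a stubbed async call (the injectOnce and fakeSend names and the 50 ms delay are illustrative, not part of the package):

```ts
// Illustrative sketch of the drop-if-busy guard used above, with a stubbed async call.
let busy = false;

async function injectOnce(text: string, send: (t: string) => Promise<void>): Promise<boolean> {
  if (busy) return false;   // an overlapping call is dropped, not queued
  busy = true;
  try {
    await send(text);
    return true;
  } finally {
    busy = false;           // always release, even if send() throws
  }
}

const fakeSend = (_t: string) => new Promise<void>((resolve) => setTimeout(resolve, 50));

// Fired concurrently, only the first call reaches send(); the second resolves to false.
Promise.all([injectOnce("first", fakeSend), injectOnce("second", fakeSend)])
  .then((results) => console.log(results)); // [true, false]
```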
@@ -46,6 +46,11 @@ export function decideNextAction(db, config) {
  warn("Unexpected stage status in decideNextAction", { status: current.status, stage_key: current.stage_key });
  return { kind: "await_stage_completion", run_id: activeRun.run_id, stage_key: current.stage_key, stage_run_id: current.stage_run_id };
  }
+ function isInitialStory(db, storyKey) {
+ // Check if this story has no parent relations (is top-level)
+ const relations = db.prepare("SELECT COUNT(*) as count FROM story_relations WHERE child_key=?").get(storyKey);
+ return relations.count === 0;
+ }
  export function createRunForStory(db, config, storyKey) {
  const story = getStory(db, storyKey);
  if (!story)
@@ -55,6 +60,15 @@ export function createRunForStory(db, config, storyKey) {
  const run_id = newRunId();
  const now = nowISO();
  const pipeline = config.workflow.pipeline;
+ // Convert to genesis planning story if needed
+ const isGenesisCandidate = storyKey === 'S-0001' || isInitialStory(db, storyKey) ||
+ (story.body_md && story.body_md.length > 100 &&
+ (story.title.toLowerCase().includes('implement') || story.body_md.toLowerCase().includes('implement')));
+ if (isGenesisCandidate) {
+ const planningTitle = `Plan and decompose: ${story.title}`;
+ const planningBody = `Analyze the requirements and break down "${story.title}" into 50-200 detailed, granular implementation stories. Each story should be focused on a specific, implementable task with clear acceptance criteria.\n\nOriginal request: ${story.body_md || ''}`;
+ db.prepare("UPDATE stories SET title=?, body_md=? WHERE story_key=?").run(planningTitle, planningBody, storyKey);
+ }
  // Lock story
  db.prepare("UPDATE stories SET state='in_progress', in_progress=1, locked_by_run_id=?, locked_at=?, updated_at=? WHERE story_key=?").run(run_id, now, now, storyKey);
  db.prepare("INSERT INTO runs (run_id, story_key, status, pipeline_stages_json, current_stage_key, created_at, started_at, updated_at) VALUES (?, ?, 'running', ?, ?, ?, ?, ?)").run(run_id, storyKey, JSON.stringify(pipeline), pipeline[0] ?? null, now, now, now);
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "astrocode-workflow",
- "version": "0.0.2",
+ "version": "0.1.1",
  "type": "module",
  "main": "dist/index.js",
  "types": "dist/index.d.ts",
@@ -18,6 +18,7 @@
  "dependencies": {
  "@opencode-ai/plugin": "^1.1.19",
  "@opencode-ai/sdk": "^1.1.19",
+ "astrocode-workflow": "^0.0.5",
  "jsonc-parser": "^3.2.0",
  "zod": "4.1.8"
  },
@@ -7,10 +7,13 @@ Mission:
  - Keep outputs short; store large outputs as artifacts and reference paths.

  Operating rules:
- - Prefer calling astro_workflow_proceed (step/loop) and astro_status.
+ - Only start new runs when the user explicitly requests implementation, workflow management, or story processing.
+ - Answer questions directly when possible without starting workflows.
+ - Prefer calling astro_workflow_proceed (step/loop) and astro_status only when actively managing a workflow.
  - Delegate stage work only to the stage subagent matching the current stage.
  - If a stage subagent returns status=blocked, inject the BLOCKED directive and stop.
  - Never delegate from subagents (enforced by permissions).
+ - Be discretionary: assess if the user's request requires workflow initiation or just information.
  `;

  export const BASE_STAGE_PROMPT = `You are an Astro stage subagent.
@@ -134,8 +134,8 @@ export function createAstroAgents(opts: {
  verify: "AstroVerify",
  close: "AstroClose"
  },
- librarian_name: "AstroLibrarian",
- explore_name: "AstroExplore",
+ librarian_name: "Librarian",
+ explore_name: "Explore",
  agent_variant_overrides: {}
  };
  }
@@ -114,6 +114,7 @@ const WorkflowSchema = z.object({

  plan_max_tasks: z.number().int().positive().default(500),
  plan_max_lines: z.number().int().positive().default(2000),
+ baton_summary_max_lines: z.number().int().positive().default(20),

  forbid_prompt_narration: z.boolean().default(true),
  single_active_run_per_repo: z.boolean().default(true),
@@ -164,8 +165,8 @@ const AgentsSchema = z.object({
  .partial()
  .default({}),

- librarian_name: z.string().default("Astro — Librarian"),
- explore_name: z.string().default("Astro — Explore"),
+ librarian_name: z.string().default("Librarian"),
+ explore_name: z.string().default("Explore"),

  agent_variant_overrides: z
  .record(
@@ -1,4 +1,5 @@
  import path from "node:path";
+ import fs from "node:fs";
  import { tool, type ToolDefinition } from "@opencode-ai/plugin/tool";
  import type { AstrocodeConfig } from "../config/schema";
  import type { SqliteDb } from "../state/db";
@@ -113,7 +114,7 @@ export function createAstroStageCompleteTool(opts: { ctx: any; config: Astrocode

  const now = nowISO();
  const pipeline = JSON.parse(run.pipeline_stages_json ?? "[]") as StageKey[];
- const next = nextStageKey(pipeline, sk);
+ let next = nextStageKey(pipeline, sk);

  const stageDirRel = toPosix(path.join(".astro", "runs", rid, sk));
  const batonRel = toPosix(path.join(stageDirRel, config.artifacts.baton_filename));
@@ -270,6 +271,66 @@ export function createAstroStageCompleteTool(opts: { ctx: any; config: Astrocode
  }
  }

+ // Skip spec stage if spec already exists
+ if (sk === "plan" && next === "spec") {
+ const specPath = path.join(repoRoot, ".astro", "spec.md");
+ if (fs.existsSync(specPath) && fs.statSync(specPath).size > 100) {
+ // Skip spec
+ db.prepare("INSERT INTO stage_runs (stage_run_id, run_id, stage_key, status, created_at, updated_at) VALUES (?, ?, ?, ?, ?, ?)")
+ .run(newId("stage"), rid, "spec", "skipped", now, now);
+ next = "implement";
+ }
+ }
+
+ // Split stories during implementation if tasks are identified
+ if (sk === "implement" && allow_new_stories && parsed.astro_json.tasks?.length) {
+ for (const task of parsed.astro_json.tasks) {
+ const complexity = task.complexity ?? 5;
+ const subtasks = task.subtasks ?? [];
+ if (subtasks.length > 0) {
+ // Split into subtasks
+ for (const subtask of subtasks) {
+ const key = insertStory(db, {
+ title: `${task.title}: ${subtask}`,
+ body_md: task.description ?? "",
+ priority: Math.max(1, 10 - complexity),
+ state: "queued",
+ epic_key: run.story_key
+ });
+ newStoryKeys.push(key);
+ db.prepare(
+ "INSERT INTO story_relations (relation_id, parent_key, child_key, relation_type, created_at) VALUES (?, ?, ?, ?, ?)"
+ ).run(
+ newId("rel"),
+ run.story_key,
+ key,
+ "split from implement",
+ now
+ );
+ }
+ } else if (complexity > 6) {
+ // Split complex tasks
+ const key = insertStory(db, {
+ title: task.title,
+ body_md: task.description ?? "",
+ priority: Math.max(1, 10 - complexity),
+ state: "queued",
+ epic_key: run.story_key
+ });
+ newStoryKeys.push(key);
+ db.prepare(
+ "INSERT INTO story_relations (relation_id, parent_key, child_key, relation_type, created_at) VALUES (?, ?, ?, ?, ?)"
+ ).run(
+ newId("rel"),
+ run.story_key,
+ key,
+ "split from implement",
+ now
+ );
+ }
+ }
+ }
+
  if (parsed.astro_json.status !== "ok") {
  const err = parsed.astro_json.status === "blocked"
  ? `blocked: ${(parsed.astro_json.questions?.[0] ?? "needs input")}`
@@ -20,12 +20,23 @@ export function createAstroStoryQueueTool(opts: { ctx: any; config: AstrocodeCon
  priority: tool.schema.number().int().default(0),
  },
  execute: async ({ title, body_md, epic_key, priority }) => {
- const now = nowISO();
+ // If the story seems like a large implementation, convert to planning story
+ const isLargeImplementation = (title.toLowerCase().includes('implement') || body_md?.toLowerCase().includes('implement')) &&
+ (body_md?.length || 0) > 100;
+
+ let finalTitle = title;
+ let finalBody = body_md;
+
+ if (isLargeImplementation) {
+ finalTitle = `Plan and decompose: ${title}`;
+ finalBody = `Analyze the requirements in the provided spec and break down "${title}" into 50-200 detailed, granular implementation stories. Each story should be focused on a specific, implementable task with clear acceptance criteria.\n\nOriginal description: ${body_md}`;
+ }
+
  const story_key = withTx(db, () => {
- return insertStory(db, { title, body_md, epic_key: epic_key ?? null, priority: priority ?? 0, state: 'queued' });
+ return insertStory(db, { title: finalTitle, body_md: finalBody, epic_key: epic_key ?? null, priority: priority ?? 0, state: 'queued' });
  });

- return `✅ Queued story ${story_key}: ${title}`;
+ return `✅ Queued story ${story_key}: ${finalTitle}`;
  },
  });
  }
@@ -16,7 +16,7 @@ function stageGoal(stage: StageKey, cfg: AstrocodeConfig): string {
  case "frame":
  return "Define scope, constraints, and an unambiguous Definition of Done.";
  case "plan":
- return `Break down the work into 10-500 detailed tasks with subtasks, estimating complexity (1-10) for each. Focus on granular, implementable units.`;
+ return `Create 50-200 detailed implementation stories, each focused on a specific, implementable task. Break down every component into separate stories with clear acceptance criteria.`;
  case "spec":
  return "Produce minimal spec/contract: interfaces, invariants, acceptance checks.";
  case "implement":
@@ -38,7 +38,7 @@ function stageConstraints(stage: StageKey, cfg: AstrocodeConfig): string[] {
  ];

  if (stage === "plan") {
- common.push(`Aim for 10-500 tasks; no hard upper limit but prioritize granularity.`);
+ common.push(`Create 50-200 stories; each story must be implementable in 2-8 hours with clear acceptance criteria.`);
  }
  if (stage === "verify" && cfg.workflow.evidence_required.verify) {
  common.push("Evidence required: ASTRO JSON must include evidence[] paths.");
package/src/ui/inject.ts CHANGED
@@ -1,13 +1,27 @@
+ let isInjecting = false;
+
  export async function injectChatPrompt(opts: {
  ctx: any;
  sessionId: string;
  text: string;
  }) {
  const { ctx, sessionId, text } = opts;
- await ctx.client.session.prompt({
- path: { id: sessionId },
- body: {
- parts: [{ type: "text", text }],
- },
- });
+
+ // Skip if already injecting (max 1 at a time, no queuing)
+ if (isInjecting) {
+ return;
+ }
+
+ isInjecting = true;
+
+ try {
+ await ctx.client.session.prompt({
+ path: { id: sessionId },
+ body: {
+ parts: [{ type: "text", text }],
+ },
+ });
+ } finally {
+ isInjecting = false;
+ }
  }
@@ -1,4 +1,6 @@
  import type { AstrocodeConfig } from "../config/schema";
+ import fs from "node:fs";
+ import path from "node:path";
  import type { SqliteDb } from "../state/db";
  import type { RunRow, StageKey, StageRunRow, StoryRow } from "../state/types";
  import { nowISO } from "../shared/time";
@@ -72,6 +74,12 @@ export function decideNextAction(db: SqliteDb, config: AstrocodeConfig): NextAct
  return { kind: "await_stage_completion", run_id: activeRun.run_id, stage_key: current.stage_key, stage_run_id: current.stage_run_id };
  }

+ function isInitialStory(db: SqliteDb, storyKey: string): boolean {
+ // Check if this story has no parent relations (is top-level)
+ const relations = db.prepare("SELECT COUNT(*) as count FROM story_relations WHERE child_key=?").get(storyKey) as { count: number };
+ return relations.count === 0;
+ }
+
  export function createRunForStory(db: SqliteDb, config: AstrocodeConfig, storyKey: string): { run_id: string } {
  const story = getStory(db, storyKey);
  if (!story) throw new Error(`Story not found: ${storyKey}`);
@@ -81,10 +89,21 @@ export function createRunForStory(db: SqliteDb, config: AstrocodeConfig, storyKe
  const now = nowISO();
  const pipeline = config.workflow.pipeline;

- // Lock story
- db.prepare(
- "UPDATE stories SET state='in_progress', in_progress=1, locked_by_run_id=?, locked_at=?, updated_at=? WHERE story_key=?"
- ).run(run_id, now, now, storyKey);
+ // Convert to genesis planning story if needed
+ const isGenesisCandidate = storyKey === 'S-0001' || isInitialStory(db, storyKey) ||
+ (story.body_md && story.body_md.length > 100 &&
+ (story.title.toLowerCase().includes('implement') || story.body_md.toLowerCase().includes('implement')));
+
+ if (isGenesisCandidate) {
+ const planningTitle = `Plan and decompose: ${story.title}`;
+ const planningBody = `Analyze the requirements and break down "${story.title}" into 50-200 detailed, granular implementation stories. Each story should be focused on a specific, implementable task with clear acceptance criteria.\n\nOriginal request: ${story.body_md || ''}`;
+ db.prepare("UPDATE stories SET title=?, body_md=? WHERE story_key=?").run(planningTitle, planningBody, storyKey);
+ }
+
+ // Lock story
+ db.prepare(
+ "UPDATE stories SET state='in_progress', in_progress=1, locked_by_run_id=?, locked_at=?, updated_at=? WHERE story_key=?"
+ ).run(run_id, now, now, storyKey);

  db.prepare(
  "INSERT INTO runs (run_id, story_key, status, pipeline_stages_json, current_stage_key, created_at, started_at, updated_at) VALUES (?, ?, 'running', ?, ?, ?, ?, ?)"