iriai-build 0.2.8 → 0.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "iriai-build",
3
- "version": "0.2.8",
3
+ "version": "0.3.0",
4
4
  "description": "Iriai Build tool — AI agent orchestration CLI",
5
5
  "type": "module",
6
6
  "bin": {
@@ -353,7 +353,7 @@ export class SlackAdapter extends InterfaceAdapter {
353
353
  const slug = slugify(featureDesc);
354
354
 
355
355
  if (this._orchestrator) {
356
- await this._orchestrator.initializeFeature(slug, messageTs, userId);
356
+ await this._orchestrator.initializeFeature(slug, messageTs, userId, featureDesc);
357
357
  }
358
358
  }
359
359
 
package/v3/constants.js CHANGED
@@ -143,6 +143,7 @@ export const SIGNAL = {
143
143
  NEEDS_REPOS: ".needs-repos",
144
144
  GATE_EVIDENCE: ".gate-evidence.yaml",
145
145
  OUTPUT_PARTIAL: ".output.partial",
146
+ SCOPING_COMPLETE: ".scoping-complete",
146
147
  };
147
148
 
148
149
  // ─── Known Repos ─────────────────────────────────────────────────────────────
package/v3/file-io.js CHANGED
@@ -51,6 +51,8 @@ export class FileIO extends EventEmitter {
51
51
  this.emit("impl:userMessage", { slug, agent: "operator", filePath });
52
52
  } else if (fileName === ".needs-repos") {
53
53
  this.emit("impl:needsRepos", { slug, filePath });
54
+ } else if (fileName === ".scoping-complete") {
55
+ this.emit("planning:scopingComplete", { slug, filePath });
54
56
  }
55
57
  return;
56
58
  }
@@ -14,6 +14,7 @@ import { invokeOperator, invokeOperatorRelay, parseOperatorResponse } from "./op
14
14
  import {
15
15
  buildPlanningRolePrompt, buildArtifactSummarizerPrompt, buildRolePrompt, buildOrchestratorPrompt,
16
16
  buildFeatureLeadInitPrompt, buildFeatureLeadRefreshPrompt, buildFeatureLeadTriggerPrompt,
17
+ buildScopingPrompt,
17
18
  } from "./prompt-builder.js";
18
19
  import { planningComplete } from "./planning-complete.js";
19
20
  import { launchImpl } from "./launch-impl.js";
@@ -70,9 +71,9 @@ export class Orchestrator {
70
71
 
71
72
  /**
72
73
  * Initialize a feature from [FEATURE] detection: create per-feature dirs,
73
- * feature channel, operator record, and start planning pipeline.
74
+ * feature channel, operator record, and start scoping phase.
74
75
  */
75
- async initializeFeature(slug, messageTs, userId) {
76
+ async initializeFeature(slug, messageTs, userId, featureDescription = "") {
76
77
  const featureDir = path.join(IMPL_BASE, "features", slug);
77
78
 
78
79
  // 1. Create directories
@@ -100,9 +101,14 @@ export class Orchestrator {
100
101
  try { fs.unlinkSync(opDest); } catch { /* ok */ }
101
102
  fs.symlinkSync(opSrc, opDest);
102
103
 
103
- // 3. Create feature in SQLite
104
+ // 3. Create feature in SQLite — store the original description in metadata
104
105
  const feature = queries.createFeature({ slug, threadTs: messageTs, signalDir: featureDir });
105
106
  queries.insertEvent(feature.id, "system", `user:${userId}`, `Feature requested: ${slug}`);
107
+ if (featureDescription) {
108
+ const meta = queries.getFeatureMetadata(feature.id);
109
+ meta.feature_description = featureDescription;
110
+ queries.updateFeatureMetadata(feature.id, meta);
111
+ }
106
112
 
107
113
  // 4. Create feature channel immediately
108
114
  const channelId = await this.adapter.createFeatureChannel(feature.id, slug);
@@ -126,7 +132,7 @@ export class Orchestrator {
126
132
  await this.adapter.postThreadMessage(feature.id,
127
133
  `*[Pipeline]* Starting planning for *${slug}*. Implementation channel: <#${channelId}>`);
128
134
  await this.adapter.postMessage(feature.id,
129
- `*[Pipeline]* Planning pipeline started for feature: *${slug}*\nPhase 1: Product Manager interview`);
135
+ `*[Pipeline]* Feature scoping started for: *${slug}*`);
130
136
 
131
137
  // Pin artifact portal URL in the feature channel
132
138
  const portalUrl = `http://localhost:${PORTAL_PORT}/features/${slug}`;
@@ -141,7 +147,7 @@ export class Orchestrator {
141
147
  }
142
148
  } else {
143
149
  await this.adapter.postThreadMessage(feature.id,
144
- `*[Pipeline]* Starting planning pipeline for: *${slug}*\nPhase 1: Product Manager interview`);
150
+ `*[Pipeline]* Starting feature scoping for: *${slug}*`);
145
151
  }
146
152
 
147
153
  // 7. Cache signal tree with planning field
@@ -158,13 +164,95 @@ export class Orchestrator {
158
164
  // 8. Start watching per-feature planning + operator signals
159
165
  this.fileIO.watchFeaturePlanningSignals(slug, planningTree, operatorDir);
160
166
 
161
- // 9. Set active planning role and dispatch PM
162
- queries.updateFeaturePlanningRole(feature.id, "pm");
163
- this.dispatchPlanningRole(feature.id, "pm");
167
+ // 9. Dispatch scoping phase — Operator converses with user to establish
168
+ // scope, blast radius, constraints before PM starts.
169
+ this._dispatchScoping(feature, operatorDir, featureDescription);
164
170
 
165
171
  return feature;
166
172
  }
167
173
 
174
+ // ═══════════════════════════════════════════════════════════════════════════
175
+ // SCOPING PHASE (Operator ↔ User conversation before PM)
176
+ // ═══════════════════════════════════════════════════════════════════════════
177
+
178
+ /**
179
+ * Dispatch the Operator for feature scoping. This is the Operator's FIRST
180
+ * invocation — it converses with the user to establish scope, blast radius,
181
+ * and constraints. The same Operator session persists via --continue for
182
+ * the rest of the feature lifecycle, preserving scoping context.
183
+ *
184
+ * When done, the Operator writes:
185
+ * - .task in PM signal dir (structured PRD request)
186
+ * - .needs-repos in operator dir (repos to pull in)
187
+ * - .scoping-complete (triggers PM dispatch)
188
+ */
189
+ _dispatchScoping(feature, operatorDir, featureDescription) {
190
+ const agent = queries.getAgentByKey(`op-${feature.slug}`);
191
+ if (!agent) {
192
+ console.error(`[orchestrator] No operator agent for ${feature.slug}`);
193
+ return;
194
+ }
195
+
196
+ const projectRoot = PROJECT_ROOT;
197
+ const directoryMapPath = path.join(projectRoot, "DIRECTORY_MAP.MD");
198
+ const hasDirectoryMap = fs.existsSync(directoryMapPath);
199
+ const pmSignalDir = path.join(feature.signal_dir, "planning", "pm");
200
+
201
+ const prompt = buildScopingPrompt({
202
+ featureName: feature.slug,
203
+ featureDescription,
204
+ operatorDir,
205
+ pmSignalDir,
206
+ planDir: path.join(feature.signal_dir, "plans"),
207
+ directoryMapPath: hasDirectoryMap ? directoryMapPath : null,
208
+ projectRoot,
209
+ });
210
+
211
+ queries.updateFeaturePlanningRole(feature.id, "scoping");
212
+ queries.insertEvent(feature.id, "system", "bridge", "Scoping phase started — Operator conversing with user");
213
+
214
+ // Fresh session — this becomes the Operator's long-lived session
215
+ // that all future --continue invocations build on.
216
+ this.supervisor.spawn(agent.id, prompt, { continue: false });
217
+ console.log(`[orchestrator] Dispatched Operator scoping for ${feature.slug}`);
218
+ }
219
+
220
+ /**
221
+ * Handle .scoping-complete signal from Operator — transition to PM dispatch.
222
+ * At this point: .task exists in PM signal dir, .needs-repos may have been written.
223
+ */
224
+ async _handleScopingComplete(slug) {
225
+ const feature = queries.getFeatureBySlug(slug);
226
+ if (!feature) return;
227
+
228
+ // Process any pending .needs-repos that may not have been picked up yet
229
+ const tree = this._signalTrees[feature.slug];
230
+ if (tree?.operator) {
231
+ const reposFile = path.join(tree.operator, SIGNAL.NEEDS_REPOS);
232
+ if (fs.existsSync(reposFile)) {
233
+ const content = readSignal(reposFile, { deleteAfter: true });
234
+ if (content) await this._handleNeedsRepos(feature, content);
235
+ }
236
+ }
237
+
238
+ // Verify the PM .task file was written
239
+ const pmSignalDir = path.join(feature.signal_dir, "planning", "pm");
240
+ const taskFile = path.join(pmSignalDir, SIGNAL.TASK);
241
+ if (!fs.existsSync(taskFile)) {
242
+ console.warn(`[orchestrator] Scoping complete for ${slug} but no .task file for PM — using feature description`);
243
+ const meta = queries.getFeatureMetadata(feature.id);
244
+ const desc = meta.feature_description || feature.slug;
245
+ writeSignal(taskFile, `Feature request: ${desc}`);
246
+ }
247
+
248
+ queries.insertEvent(feature.id, "phase-transition", "bridge", "Scoping complete — dispatching PM");
249
+ await this.adapter.postPipelineMessage(feature.id, "Scoping complete. Starting Product Manager interview.");
250
+
251
+ // Dispatch PM with the structured .task from scoping
252
+ queries.updateFeaturePlanningRole(feature.id, "pm");
253
+ this.dispatchPlanningRole(feature.id, "pm");
254
+ }
255
+
168
256
  // ═══════════════════════════════════════════════════════════════════════════
169
257
  // PLANNING PIPELINE
170
258
  // ═══════════════════════════════════════════════════════════════════════════
@@ -390,6 +478,55 @@ export class Orchestrator {
390
478
  // Compound design step: ui-designer done → surface as "designer" phase-review
391
479
  // Both sub-roles are complete; present combined output for user approval
392
480
  await this._requestPhaseReview(feature, "designer", output);
481
+ } else if (role === "architect") {
482
+ // Architect validation: ensure structured plan directory before review gate.
483
+ // The architect often drifts to writing a monolithic architecture doc
484
+ // instead of the structured plan directory (plan.yaml + phases/ + task files).
485
+ // Validate required artifacts and redispatch if incomplete.
486
+ const planDir = path.join(feature.signal_dir, "plans");
487
+ const validation = this._validateArchitectOutput(planDir);
488
+
489
+ if (validation.pass) {
490
+ await this._requestPhaseReview(feature, role, output);
491
+ } else {
492
+ // Redispatch architect with focused instructions
493
+ console.log(`[orchestrator] Architect output incomplete for ${slug}: missing ${validation.missing.join(", ")}`);
494
+ await this.adapter.postPipelineMessage(feature.id,
495
+ `Architect investigation complete — redispatching for structured plan output (missing: ${validation.missing.join(", ")})`);
496
+
497
+ const signalDir = path.join(feature.signal_dir, "planning", "architect");
498
+ ensureDir(signalDir);
499
+
500
+ // Write focused redispatch instructions as .user-message so the agent picks it up
501
+ const redispatchMsg = [
502
+ `STRUCTURED PLAN OUTPUT REQUIRED`,
503
+ ``,
504
+ `Your investigation phase is complete. You wrote a thorough context.md in $PLAN_DIR/.`,
505
+ `Now you MUST produce the structured plan directory. Read your CLAUDE.md "Output Format" section carefully.`,
506
+ ``,
507
+ `WHAT EXISTS (do NOT redo):`,
508
+ validation.found.map(f => ` ✓ ${f}`).join("\n"),
509
+ ``,
510
+ `WHAT IS MISSING (you must create these):`,
511
+ validation.missing.map(f => ` ✗ ${f}`).join("\n"),
512
+ ``,
513
+ `INSTRUCTIONS:`,
514
+ `1. Read $PLAN_DIR/context.md for your investigation notes — do NOT re-explore the entire codebase`,
515
+ `2. Do targeted source code reads ONLY where you need specific file paths, function signatures, or line numbers for task files`,
516
+ `3. Produce plan.yaml with phase DAG, role assignments, and journey refs`,
517
+ `4. Create phases/ directory with structured task files (YAML frontmatter + detailed instructions)`,
518
+ `5. Create journeys/ directory with test journeys (happy-path, failure, regression)`,
519
+ `6. Every task file must cite real file paths and code evidence from the codebase`,
520
+ `7. Signal .done + .output when complete`,
521
+ ``,
522
+ `Your context.md is the foundation. Build the structured plan ON TOP of it.`,
523
+ ].join("\n");
524
+
525
+ writeSignal(path.join(signalDir, SIGNAL.USER_MESSAGE), redispatchMsg);
526
+ queries.updateFeaturePlanningRole(feature.id, "architect");
527
+ // Fresh context — investigation session is done, architect needs full budget for structured output
528
+ this.dispatchPlanningRole(feature.id, "architect", { continue: false });
529
+ }
393
530
  } else {
394
531
  // Non-final role → summary + artifact upload + Block Kit review gate (all sequential)
395
532
  await this._requestPhaseReview(feature, role, output);
@@ -401,6 +538,72 @@ export class Orchestrator {
401
538
  }
402
539
  }
403
540
 
541
+ /**
542
+ * Validate that the architect produced the required structured plan directory.
543
+ * Auto-normalizes architecture.md → context.md if needed.
544
+ * Returns { pass: boolean, found: string[], missing: string[] }
545
+ */
546
+ _validateArchitectOutput(planDir) {
547
+ const found = [];
548
+ const missing = [];
549
+
550
+ // Normalize: rename architecture.md → context.md if architect used wrong name
551
+ const archFile = path.join(planDir, "architecture.md");
552
+ const contextFile = path.join(planDir, "context.md");
553
+ if (fs.existsSync(archFile) && !fs.existsSync(contextFile)) {
554
+ try {
555
+ fs.renameSync(archFile, contextFile);
556
+ console.log(`[orchestrator] Renamed architecture.md → context.md in ${planDir}`);
557
+ } catch { /* ok, proceed with whatever exists */ }
558
+ }
559
+
560
+ // Check context.md (investigation notes)
561
+ if (findArtifact("context", planDir) || findArtifact("architecture", planDir)) {
562
+ found.push("context.md (investigation notes)");
563
+ } else {
564
+ missing.push("context.md (investigation notes)");
565
+ }
566
+
567
+ // Check plan.yaml (phase DAG metadata)
568
+ if (fs.existsSync(path.join(planDir, "plan.yaml"))) {
569
+ found.push("plan.yaml (phase DAG)");
570
+ } else {
571
+ missing.push("plan.yaml (phase DAG + metadata)");
572
+ }
573
+
574
+ // Check phases/ directory with at least one phase subdirectory
575
+ const phasesDir = path.join(planDir, "phases");
576
+ let hasPhases = false;
577
+ try {
578
+ const entries = fs.readdirSync(phasesDir, { withFileTypes: true });
579
+ hasPhases = entries.some(e => e.isDirectory());
580
+ } catch { /* doesn't exist */ }
581
+ if (hasPhases) {
582
+ found.push("phases/ (task files)");
583
+ } else {
584
+ missing.push("phases/ directory with task files");
585
+ }
586
+
587
+ // Check journeys/ directory
588
+ const journeysDir = path.join(planDir, "journeys");
589
+ let hasJourneys = false;
590
+ try {
591
+ const entries = fs.readdirSync(journeysDir);
592
+ hasJourneys = entries.some(e => e.endsWith(".md"));
593
+ } catch { /* doesn't exist */ }
594
+ if (hasJourneys) {
595
+ found.push("journeys/ (test journeys)");
596
+ } else {
597
+ missing.push("journeys/ (test journeys)");
598
+ }
599
+
600
+ return {
601
+ pass: missing.length === 0,
602
+ found,
603
+ missing,
604
+ };
605
+ }
606
+
404
607
  /**
405
608
  * Post phase summary, upload artifact, then post Block Kit approve/reject buttons.
406
609
  * Everything is sequential to guarantee correct message ordering.
@@ -1839,6 +2042,17 @@ export class Orchestrator {
1839
2042
  }
1840
2043
  });
1841
2044
 
2045
+ // Scoping complete — Operator finished scoping conversation, dispatch PM
2046
+ this.fileIO.on("planning:scopingComplete", async ({ slug, filePath }) => {
2047
+ try {
2048
+ // Clean up the signal file
2049
+ try { fs.unlinkSync(filePath); } catch { /* ok */ }
2050
+ await this._handleScopingComplete(slug);
2051
+ } catch (err) {
2052
+ console.error("[orchestrator] Scoping complete error:", err.message);
2053
+ }
2054
+ });
2055
+
1842
2056
  // Implementation signals — FL .agent-response routed through Operator relay
1843
2057
  this.fileIO.on("impl:response", async ({ slug, agent, filePath }) => {
1844
2058
  try {
@@ -2090,6 +2304,9 @@ export class Orchestrator {
2090
2304
  if (agentName !== "operator") return;
2091
2305
  const feature = queries.getFeatureBySlug(slug);
2092
2306
  if (!feature) return;
2307
+ // During scoping phase, the long-running scoping Operator polls for
2308
+ // .user-message directly — don't consume it with ephemeral Operator.
2309
+ if (feature.active_planning_role === "scoping") return;
2093
2310
  this._handleOperatorMessage(feature);
2094
2311
  });
2095
2312
 
@@ -2286,6 +2503,23 @@ export class Orchestrator {
2286
2503
 
2287
2504
  console.log(`[orchestrator] Operator exited with code ${exitCode} after ${elapsed}ms — scheduling retry`);
2288
2505
 
2506
+ // If still in scoping phase, re-dispatch scoping (not ephemeral Operator)
2507
+ if (feature.active_planning_role === "scoping") {
2508
+ const retried = this.supervisor.scheduleRetry(agentId, async () => {
2509
+ console.log(`[orchestrator] Operator scoping retry for ${feature.slug}`);
2510
+ const freshFeature = queries.getFeatureById(feature.id) || feature;
2511
+ const meta = queries.getFeatureMetadata(freshFeature.id);
2512
+ this._dispatchScoping(freshFeature, tree.operator, meta.feature_description || "");
2513
+ return null;
2514
+ });
2515
+ if (!retried) {
2516
+ console.error(`[orchestrator] Operator scoping for ${feature.slug} exhausted retries`);
2517
+ // Fallback: skip scoping and dispatch PM directly
2518
+ this._handleScopingComplete(feature.slug);
2519
+ }
2520
+ return;
2521
+ }
2522
+
2289
2523
  // Capture stashed context for the retry closure
2290
2524
  const stashedCtx = this._lastOperatorContext?.[feature.slug];
2291
2525
 
@@ -27,8 +27,8 @@ export function compilePlanReviewHtml(planDir) {
27
27
  tabs.push({ id: "design", label: "Design Decisions", content: designContent });
28
28
  }
29
29
 
30
- // Architecture (context.md)
31
- const contextContent = readArtifact(planDir, "context");
30
+ // Architecture (context.md or architecture.md — agents may use either name)
31
+ const contextContent = readArtifact(planDir, "context") || readArtifact(planDir, "architecture");
32
32
  if (contextContent) {
33
33
  tabs.push({ id: "architecture", label: "Architecture", content: contextContent });
34
34
  }
@@ -3,6 +3,170 @@
3
3
 
4
4
  import { IRIAI_TEAM_DIR, IMPL_BASE, V3_ROLES_DIR } from "./constants.js";
5
5
 
6
+ // ─── Scoping Phase (Operator as long-running scoping agent) ──────────────────
7
+
8
+ /**
9
+ * Build prompt for the scoping phase — Operator converses with user to
10
+ * establish scope, blast radius, and constraints before PM starts.
11
+ */
12
+ export function buildScopingPrompt({ featureName, featureDescription, operatorDir, pmSignalDir, planDir, directoryMapPath, projectRoot }) {
13
+ const directoryMapSection = directoryMapPath
14
+ ? `## Directory Map (Codebase Topology)
15
+ Read the directory map for high-level understanding of the project:
16
+ DIRECTORY_MAP=${directoryMapPath}
17
+
18
+ Use this to identify which repos/services are affected by this feature.
19
+ Do NOT read source code files — only use the directory map for repo identification.`
20
+ : `## No Directory Map Available
21
+ There is no DIRECTORY_MAP.MD in this project. Use the feature description
22
+ and your conversation with the user to understand the scope. You may explore
23
+ the top-level directory structure (ls, not reading source files) to identify repos.`;
24
+
25
+ return `You are the Operator for feature '${featureName}'.
26
+
27
+ Read your CLAUDE.md for your full role definition. This is your FIRST invocation —
28
+ your session will persist via --continue for the entire feature lifecycle.
29
+
30
+ ## CURRENT TASK: Feature Scoping
31
+
32
+ Before any planning agents start, you must scope this feature with the user.
33
+ Your job is boundary-setting — narrowing the sandbox that downstream agents work within.
34
+
35
+ You are NOT:
36
+ - Writing requirements (that's the PM's job)
37
+ - Making design decisions (that's the Designer's job)
38
+ - Investigating source code (that's the Architect's job)
39
+
40
+ You ARE:
41
+ - Establishing what repos/services are in scope
42
+ - Identifying blast radius (what else might be affected)
43
+ - Capturing non-functional requirements and constraints
44
+ - Recording any upfront decisions the user already has opinions on
45
+ - Identifying if new repos need to be created
46
+
47
+ ## Signal Directories
48
+ OPERATOR_DIR=${operatorDir}
49
+ PM_SIGNAL_DIR=${pmSignalDir}
50
+ PLAN_DIR=${planDir}
51
+ PROJECT_ROOT=${projectRoot}
52
+
53
+ ${directoryMapSection}
54
+
55
+ ## Feature Description (from user)
56
+ ${featureDescription || featureName}
57
+
58
+ ## Communication Protocol
59
+ 1. To send a message to the user: write to .agent-response
60
+ cat > ${operatorDir}/.agent-response << 'MSG_EOF'
61
+ your message here
62
+ MSG_EOF
63
+ The bridge posts it to the user and deletes the file.
64
+
65
+ 2. To receive a reply: poll for .user-message
66
+ while [ ! -f ${operatorDir}/.user-message ]; do sleep 5; done
67
+ MSG=$(cat ${operatorDir}/.user-message) && rm -f ${operatorDir}/.user-message
68
+
69
+ 3. After each .agent-response write, wait 2 seconds before polling.
70
+
71
+ Format for mobile: under 300 words per message, numbered options, bold key questions.
72
+
73
+ ## Conversation Flow
74
+
75
+ 1. **Read the directory map** (if available) to understand the project topology.
76
+
77
+ 2. **Greet the user and confirm the feature request.** Restate your understanding
78
+ of what they want in 2-3 sentences. Ask if that's correct.
79
+
80
+ 3. **Ask scoping questions one at a time.** Focus on:
81
+ - What repos/services are affected? (Use directory map to suggest candidates)
82
+ - Is this extending existing functionality or building something new?
83
+ - If new: what's the new repo/app name and where does it live?
84
+ - What are the constraints? (Performance, security, accessibility, compatibility)
85
+ - Are there any hard non-negotiable requirements?
86
+ - What is explicitly OUT of scope?
87
+
88
+ 4. **Keep it brief.** 3-5 questions max. Don't duplicate PM/Designer/Architect work.
89
+ If the user gives you enough context in the first message, you can skip to the summary.
90
+
91
+ 5. **Summarize and confirm.** Present a scoping summary for user approval:
92
+ - Scope (what's being built)
93
+ - Affected repos/services
94
+ - New repos to create (if any)
95
+ - Constraints/NFRs
96
+ - Out of scope
97
+
98
+ ## When Scoping Is Complete
99
+
100
+ After the user confirms your summary, do THREE things in order:
101
+
102
+ ### 1. Write the structured PRD request for the PM
103
+ Write a .task file in the PM's signal directory. This is the PM's starting brief.
104
+
105
+ cat > ${pmSignalDir}/.task << 'TASK_EOF'
106
+ FEATURE: ${featureName}
107
+
108
+ ## User's Request
109
+ <restate the feature request clearly>
110
+
111
+ ## Scope
112
+ <what is in scope — be specific>
113
+
114
+ ## Affected Repos/Services
115
+ <list repos identified from directory map + conversation>
116
+
117
+ ## New Repos
118
+ <any new repos to create, or "none">
119
+
120
+ ## Constraints & Non-Functional Requirements
121
+ <performance, security, accessibility, compatibility requirements>
122
+
123
+ ## Out of Scope
124
+ <what is explicitly excluded>
125
+
126
+ ## User Decisions
127
+ <any decisions the user made during scoping>
128
+ TASK_EOF
129
+
130
+ ### 2. Write .needs-repos
131
+ Identify the repos from the directory map that need worktrees.
132
+ Write their paths (relative to PROJECT_ROOT) to .needs-repos.
133
+ ${directoryMapPath ? `Read DIRECTORY_MAP for the exact paths.` : `List the repo paths based on your conversation with the user.`}
134
+
135
+ For existing repos:
136
+ cat > ${operatorDir}/.needs-repos << 'REPOS_EOF'
137
+ path/to/repo1
138
+ path/to/repo2
139
+ REPOS_EOF
140
+
141
+ For NEW repos that don't exist yet, use the + prefix:
142
+ +<local-path>:<github-name>[:<template>]
143
+
144
+ Available templates: fastapi-postgres, react-parcel
145
+ If no template fits, omit it (bare scaffold with README + .gitignore).
146
+
147
+ ### 3. Signal scoping complete
148
+ echo "DONE" > ${operatorDir}/.scoping-complete
149
+
150
+ This tells the bridge to dispatch the PM with your .task file.
151
+ After this signal, you will continue operating as the Operator for this feature
152
+ (relaying messages, routing decisions, pulling in additional repos as needed).
153
+
154
+ ## Important
155
+ - Do NOT write .scoping-complete until AFTER .task and .needs-repos are written
156
+ - Do NOT read source code — only the directory map for high-level repo identification
157
+ - Keep the conversation focused — you're setting boundaries, not doing analysis
158
+ - If the user wants to skip scoping (e.g., "just build it"), write a minimal .task
159
+ from the feature description and proceed
160
+
161
+ ## Context Preservation
162
+ This scoping conversation is critical context for your entire lifecycle as Operator.
163
+ The repos you identified, the user's constraints, and their decisions must be preserved.
164
+ If you ever need to write a .handover file for context refresh, ALWAYS include the full
165
+ scoping summary (repos, scope, constraints, user decisions) verbatim — do NOT summarize
166
+ or truncate it.
167
+ `;
168
+ }
169
+
6
170
  // ─── Operator (Ephemeral) ────────────────────────────────────────────────────
7
171
 
8
172
  /**
@@ -249,7 +249,7 @@ export class SlackAdapter {
249
249
  const slug = slugify(featureDesc);
250
250
 
251
251
  if (this._orchestrator) {
252
- await this._orchestrator.initializeFeature(slug, messageTs, userId);
252
+ await this._orchestrator.initializeFeature(slug, messageTs, userId, featureDesc);
253
253
  }
254
254
  }
255
255