@yemi33/squad 0.1.9 → 0.1.10

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -41,9 +41,11 @@ node ~/.squad/squad.js init
41
41
  ## Quick Start
42
42
 
43
43
  ```bash
44
- # 1. Link your projects (interactive prompts for name, description, repo config)
45
- squad add ~/repo1
46
- squad add ~/repo2
44
+ # 1. Init + scan finds all git repos on your machine, multi-select to add
45
+ squad init
46
+ # → creates config, agents, engine defaults
47
+ # → scans ~ for git repos (auto-detects host, org, branch)
48
+ # → shows numbered list, pick with "1,3,5-7" or "all"
47
49
 
48
50
  # 2. Start the engine (runs in foreground, ticks every 60s)
49
51
  squad start
@@ -53,6 +55,13 @@ squad dash
53
55
  # → http://localhost:7331
54
56
  ```
55
57
 
58
+ You can also add/scan repos later:
59
+ ```bash
60
+ squad scan # Re-scan and add more repos
61
+ squad scan ~/code 4 # Scan specific dir, depth 4
62
+ squad add ~/repo # Add a single repo interactively
63
+ ```
64
+
56
65
  ## Setup via Claude Code
57
66
 
58
67
  If you use Claude Code as your daily driver, you can set up Squad by prompting Claude directly:
@@ -91,8 +100,9 @@ squad work "Explore the codebase and document the architecture"
91
100
 
92
101
  | Command | Description |
93
102
  |---------|-------------|
94
- | `squad init` | Bootstrap `~/.squad/` with default agents and config |
95
- | `squad add <dir>` | Link a project (auto-detects settings from git, prompts to confirm) |
103
+ | `squad init` | Bootstrap `~/.squad/` then auto-scan for repos to add |
104
+ | `squad scan [dir] [depth]` | Scan for git repos and multi-select to add (default: ~, depth 3) |
105
+ | `squad add <dir>` | Link a single project (auto-detects settings from git, prompts to confirm) |
96
106
  | `squad remove <dir>` | Unlink a project |
97
107
  | `squad list` | List all linked projects with descriptions |
98
108
  | `squad start` | Start engine daemon (ticks every 60s, auto-syncs MCP servers) |
@@ -121,6 +131,7 @@ You can also run scripts directly: `node ~/.squad/engine.js start`, `node ~/.squ
121
131
  │ mcp-servers.json ← auto-sync │
122
132
  │ agents/ ← 5 agents │
123
133
  │ playbooks/ ← templates │
134
+ │ prd.json ← squad PRD │
124
135
  │ skills/ ← workflows │
125
136
  │ notes/ ← knowledge │
126
137
  └──────┬────────────────────────┘
@@ -132,8 +143,6 @@ You can also run scripts directly: `node ~/.squad/engine.js start`, `node ~/.squ
132
143
  │ .squad/ │ │ .squad/ │ │ .squad/ │
133
144
  │ work-items │ │ work-items │ │ work-items │
134
145
  │ pull-reqs │ │ pull-reqs │ │ pull-reqs │
135
- │ docs/ │ │ docs/ │ │ docs/ │
136
- │ prd-gaps │ │ prd-gaps │ │ prd-gaps │
137
146
  │ .claude/ │ │ .claude/ │ │ .claude/ │
138
147
  │ skills/ │ │ skills/ │ │ skills/ │
139
148
  └──────────────┘ └──────────────┘ └──────────────┘
@@ -141,7 +150,7 @@ You can also run scripts directly: `node ~/.squad/engine.js start`, `node ~/.squ
141
150
 
142
151
  ## What It Does
143
152
 
144
- - **Auto-discovers work** from PRD gaps, pull requests, and work queues across all linked projects
153
+ - **Auto-discovers work** from squad-level PRD (multi-project), pull requests, and work queues across all linked projects
145
154
  - **Dispatches AI agents** (Claude CLI) with full project context, git worktrees, and MCP server access
146
155
  - **Routes intelligently** — fixes first, then reviews, then implementation, matched to agent strengths
147
156
  - **Learns from itself** — agents write findings, engine consolidates into institutional knowledge
@@ -159,7 +168,7 @@ You can also run scripts directly: `node ~/.squad/engine.js start`, `node ~/.squ
159
168
  The web dashboard at `http://localhost:7331` provides:
160
169
 
161
170
  - **Projects bar** — all linked projects with descriptions (hover for full text)
162
- - **Command Center** — add work items (per-project, auto-route, or fan-out), notes, and PRD items
171
+ - **Command Center** — add work items, notes, plans, and PRD items (multi-project via `#project` tags)
163
172
  - **Squad Members** — agent cards with status, click for charter/history/output detail panel
164
173
  - **Live Output tab** — real-time streaming output for working agents (auto-refreshes every 3s)
165
174
  - **Work Items** — paginated table with status, source, type, priority, assigned agent, linked PRs, fan-out badges, and retry button for failed items
@@ -186,7 +195,6 @@ When you run `squad add <dir>`, it prompts for project details and saves them to
186
195
  "repoName": "MyProject",
187
196
  "mainBranch": "main",
188
197
  "workSources": {
189
- "prd": { "enabled": true, "path": "docs/prd-gaps.json" },
190
198
  "pullRequests": { "enabled": true, "path": ".squad/pull-requests.json" },
191
199
  "workItems": { "enabled": true, "path": ".squad/work-items.json" }
192
200
  }
@@ -281,7 +289,7 @@ The engine discovers work from 5 sources, in priority order:
281
289
  |----------|--------|---------------|
282
290
  | 1 | PRs with changes-requested | `fix` |
283
291
  | 2 | PRs pending review | `review` |
284
- | 3 | PRD items (missing/planned) | `implement` |
292
+ | 3 | Squad PRD items (missing/planned, multi-project) | `implement` |
285
293
  | 4 | Per-project work items | item's `type` |
286
294
  | 5 | Central work items | item's `type` |
287
295
 
@@ -358,10 +366,11 @@ All playbooks use `{{template_variables}}` filled from project config. The `work
358
366
 
359
367
  Uses `live-output.log` file modification time as a heartbeat:
360
368
  - **Process alive + recent output** → healthy, keep running
369
+ - **Process alive + in blocking tool call** → extended timeout (matches tool's timeout + grace period)
361
370
  - **Process alive + silent >5min** → hung, kill and mark failed
362
371
  - **No process + silent >5min** → orphaned (engine restarted), mark failed
363
372
 
364
- Agents can run for hours as long as they're producing output. The `heartbeatTimeout` (default 5min) only triggers on silence.
373
+ Agents can run for hours as long as they're producing output. The `heartbeatTimeout` (default 5min) only triggers on silence. When an agent is in a blocking tool call (e.g., `TaskOutput` with `block:true`, `Bash` with long timeout), the engine detects this from the live output and extends the timeout automatically.
365
374
 
366
375
  ### Automated Cleanup (every 10 ticks)
367
376
 
@@ -459,6 +468,7 @@ To move to a new machine: `npm install -g @yemi33/squad && squad init --force`,
459
468
  dashboard.js <- Web dashboard server
460
469
  dashboard.html <- Dashboard UI (single-file)
461
470
  config.json <- projects[], agents, engine, claude settings
471
+ prd.json <- Squad-level PRD (multi-project items)
462
472
  config.template.json <- Template for new installs
463
473
  package.json <- npm package definition
464
474
  mcp-servers.json <- MCP servers (auto-synced, gitignored)
@@ -500,6 +510,4 @@ Each linked project keeps locally:
500
510
  pull-requests.json <- PR tracker
501
511
  <project>/.claude/
502
512
  skills/ <- Project-specific skills (requires PR)
503
- <project>/docs/
504
- prd-gaps.json <- PRD gap analysis
505
513
  ```
package/dashboard.html CHANGED
@@ -80,6 +80,8 @@
80
80
  .prd-item-priority.low { background: rgba(139,148,158,0.15); color: var(--muted); }
81
81
  .prd-project-badge { font-size: 9px; padding: 1px 5px; border-radius: 6px; background: rgba(56,139,253,0.12); color: var(--blue); border: 1px solid rgba(56,139,253,0.25); white-space: nowrap; }
82
82
 
83
+ .notes-preview { max-height: 240px; overflow-y: auto; font-size: 12px; line-height: 1.6; color: var(--muted); font-family: Consolas, monospace; white-space: pre-wrap; word-wrap: break-word; background: var(--surface2); border: 1px solid var(--border); border-radius: 6px; padding: 12px 14px; cursor: pointer; transition: border-color 0.2s; }
84
+ .notes-preview:hover { border-color: var(--blue); }
83
85
  .inbox-item { background: var(--surface2); border: 1px solid var(--border); border-left: 3px solid var(--purple); border-radius: 4px; padding: 10px 12px; cursor: pointer; }
84
86
  .inbox-item:hover { border-color: var(--blue); border-left-color: var(--blue); }
85
87
  .inbox-name { font-weight: 500; font-size: 12px; color: var(--purple); margin-bottom: 4px; display: flex; justify-content: space-between; }
@@ -745,21 +747,18 @@ async function openInboxInExplorer(name) {
745
747
 
746
748
  function renderNotes(notes) {
747
749
  const el = document.getElementById('notes-list');
748
- if (!notes.length) { el.innerHTML = '<p class="empty">No team notes yet.</p>'; return; }
749
- el.innerHTML = '<div style="display:flex;flex-direction:column;gap:6px">' +
750
- notes.map(d => '<div style="font-size:12px;color:var(--text);padding:6px 10px;background:var(--surface2);border:1px solid var(--border);border-radius:4px;cursor:pointer" onclick="openNotesFile()">' + escHtml(d) + '</div>').join('') +
751
- '</div>';
750
+ if (!notes || !notes.trim()) { el.innerHTML = '<p class="empty">No team notes yet.</p>'; return; }
751
+ el.innerHTML = '<div class="notes-preview" onclick="openNotesModal()" title="Click to expand">' + escHtml(notes) + '</div>';
752
752
  }
753
753
 
754
- async function openNotesFile() {
755
- try {
756
- const content = await fetch('/api/notes-full').then(r => r.text());
757
- document.getElementById('modal-title').textContent = 'Team Notes (notes.md)';
758
- document.getElementById('modal-body').textContent = content;
759
- document.getElementById('modal-body').style.fontFamily = 'Consolas, monospace';
760
- document.getElementById('modal-body').style.whiteSpace = 'pre-wrap';
761
- document.getElementById('modal').classList.add('open');
762
- } catch {}
754
+ function openNotesModal() {
755
+ const preview = document.querySelector('.notes-preview');
756
+ if (!preview) return;
757
+ document.getElementById('modal-title').textContent = 'Team Notes';
758
+ document.getElementById('modal-body').textContent = preview.textContent;
759
+ document.getElementById('modal-body').style.fontFamily = 'Consolas, monospace';
760
+ document.getElementById('modal-body').style.whiteSpace = 'pre-wrap';
761
+ document.getElementById('modal').classList.add('open');
763
762
  }
764
763
 
765
764
  function renderPrd(prd) {
@@ -1406,10 +1405,11 @@ function cmdParseInput(raw) {
1406
1405
  type: '', // work item type (auto-detected)
1407
1406
  };
1408
1407
 
1409
- // Detect /decide or /note prefix
1410
- if (/^\/decide\b/i.test(text) || /^\/note\b/i.test(text)) {
1408
+ // Detect /decide, /note, or natural "remember" keyword
1409
+ const rememberPattern = /^(remember|remember that|don't forget|note that|keep in mind)\b/i;
1410
+ if (/^\/decide\b/i.test(text) || /^\/note\b/i.test(text) || rememberPattern.test(text)) {
1411
1411
  result.intent = 'note';
1412
- text = text.replace(/^\/decide\s*/i, '');
1412
+ text = text.replace(/^\/decide\s*/i, '').replace(/^\/note\s*/i, '').replace(rememberPattern, '').trim();
1413
1413
  } else if (/^\/plan\b/i.test(text)) {
1414
1414
  result.intent = 'plan';
1415
1415
  text = text.replace(/^\/plan\s*/i, '');
package/dashboard.js CHANGED
@@ -38,6 +38,30 @@ function safeReadDir(dir) {
38
38
  try { return fs.readdirSync(dir); } catch { return []; }
39
39
  }
40
40
 
41
+ // Atomic write with Windows EPERM retry (matches engine.js safeWrite)
42
+ function safeWrite(p, data) {
43
+ const dir = path.dirname(p);
44
+ if (!fs.existsSync(dir)) fs.mkdirSync(dir, { recursive: true });
45
+ const content = typeof data === 'string' ? data : JSON.stringify(data, null, 2);
46
+ const tmp = p + '.tmp.' + process.pid;
47
+ try {
48
+ fs.writeFileSync(tmp, content);
49
+ for (let attempt = 0; attempt < 5; attempt++) {
50
+ try { fs.renameSync(tmp, p); return; } catch (e) {
51
+ if (e.code === 'EPERM' && attempt < 4) {
52
+ const delay = 50 * (attempt + 1);
53
+ const start = Date.now(); while (Date.now() - start < delay) {}
54
+ continue;
55
+ }
56
+ }
57
+ }
58
+ try { fs.unlinkSync(tmp); } catch {}
59
+ safeWrite(p, content);
60
+ } catch {
61
+ try { fs.unlinkSync(tmp); } catch {}
62
+ }
63
+ }
64
+
41
65
  function timeSince(ms) {
42
66
  const s = Math.floor((Date.now() - ms) / 1000);
43
67
  if (s < 60) return `${s}s ago`;
@@ -184,8 +208,7 @@ function getInbox() {
184
208
  }
185
209
 
186
210
  function getNotes() {
187
- const content = safeRead(path.join(SQUAD_DIR, 'notes.md')) || '';
188
- return content.split('\n').filter(l => l.startsWith('### ')).map(l => l.replace('### ', '').trim());
211
+ return safeRead(path.join(SQUAD_DIR, 'notes.md')) || '';
189
212
  }
190
213
 
191
214
  function getPullRequests() {
@@ -481,7 +504,7 @@ const server = http.createServer(async (req, res) => {
481
504
  delete item.failReason;
482
505
  delete item.failedAt;
483
506
  delete item.fanOutAgents;
484
- fs.writeFileSync(wiPath, JSON.stringify(items, null, 2));
507
+ safeWrite(wiPath, items);
485
508
 
486
509
  // Clear completed dispatch entries so the engine doesn't dedup this item
487
510
  const dispatchPath = path.join(SQUAD_DIR, 'engine', 'dispatch.json');
@@ -495,7 +518,7 @@ const server = http.createServer(async (req, res) => {
495
518
  // Also clear fan-out entries
496
519
  dispatch.completed = dispatch.completed.filter(d => !d.meta?.parentKey || d.meta.parentKey !== dispatchKey);
497
520
  if (dispatch.completed.length !== before) {
498
- fs.writeFileSync(dispatchPath, JSON.stringify(dispatch, null, 2));
521
+ safeWrite(dispatchPath, dispatch);
499
522
  }
500
523
  }
501
524
  } catch {}
@@ -544,13 +567,13 @@ const server = http.createServer(async (req, res) => {
544
567
  status.status = 'idle';
545
568
  delete status.currentTask;
546
569
  delete status.dispatched;
547
- fs.writeFileSync(statusPath, JSON.stringify(status, null, 2));
570
+ safeWrite(statusPath, status);
548
571
  } catch {}
549
572
  }
550
573
 
551
574
  // Remove item from work-items file
552
575
  items.splice(idx, 1);
553
- fs.writeFileSync(wiPath, JSON.stringify(items, null, 2));
576
+ safeWrite(wiPath, items);
554
577
 
555
578
  // Clear dispatch entries (pending, active, completed + fan-out)
556
579
  const dispatchPath = path.join(SQUAD_DIR, 'engine', 'dispatch.json');
@@ -570,7 +593,7 @@ const server = http.createServer(async (req, res) => {
570
593
  }
571
594
  }
572
595
  if (changed) {
573
- fs.writeFileSync(dispatchPath, JSON.stringify(dispatch, null, 2));
596
+ safeWrite(dispatchPath, dispatch);
574
597
  }
575
598
  } catch {}
576
599
 
@@ -611,8 +634,8 @@ const server = http.createServer(async (req, res) => {
611
634
  const existing = safeRead(archivePath);
612
635
  if (existing) { try { archive = JSON.parse(existing); } catch {} }
613
636
  archive.push(item);
614
- fs.writeFileSync(archivePath, JSON.stringify(archive, null, 2));
615
- fs.writeFileSync(wiPath, JSON.stringify(items, null, 2));
637
+ safeWrite(archivePath, archive);
638
+ safeWrite(wiPath, items);
616
639
 
617
640
  return jsonReply(res, 200, { ok: true, id });
618
641
  } catch (e) { return jsonReply(res, 400, { error: e.message }); }
@@ -642,6 +665,7 @@ const server = http.createServer(async (req, res) => {
642
665
  if (req.method === 'POST' && req.url === '/api/work-items') {
643
666
  try {
644
667
  const body = await readBody(req);
668
+ if (!body.title || !body.title.trim()) return jsonReply(res, 400, { error: 'title is required' });
645
669
  let wiPath;
646
670
  if (body.project) {
647
671
  // Write to project-specific queue
@@ -672,29 +696,24 @@ const server = http.createServer(async (req, res) => {
672
696
  if (body.agent) item.agent = body.agent;
673
697
  if (body.agents) item.agents = body.agents;
674
698
  items.push(item);
675
- fs.writeFileSync(wiPath, JSON.stringify(items, null, 2));
699
+ safeWrite(wiPath, items);
676
700
  return jsonReply(res, 200, { ok: true, id });
677
701
  } catch (e) { return jsonReply(res, 400, { error: e.message }); }
678
702
  }
679
703
 
680
- // POST /api/notes
704
+ // POST /api/notes — write to inbox so it flows through normal consolidation
681
705
  if (req.method === 'POST' && req.url === '/api/notes') {
682
706
  try {
683
707
  const body = await readBody(req);
684
- const decPath = path.join(SQUAD_DIR, 'notes.md');
685
- let content = safeRead(decPath) || '# Squad Notes\n\n## Active Notes\n';
708
+ if (!body.title || !body.title.trim()) return jsonReply(res, 400, { error: 'title is required' });
709
+ const inboxDir = path.join(SQUAD_DIR, 'notes', 'inbox');
710
+ fs.mkdirSync(inboxDir, { recursive: true });
686
711
  const today = new Date().toISOString().slice(0, 10);
687
- const entry = `\n### ${today}: ${body.title}\n**By:** ${body.author || os.userInfo().username}\n**What:** ${body.what}\n${body.why ? '**Why:** ' + body.why + '\n' : ''}\n---\n`;
688
- // Support both old and new marker formats
689
- const marker = '## Active Notes';
690
- const idx = content.indexOf(marker);
691
- if (idx !== -1) {
692
- const insertAt = idx + marker.length;
693
- content = content.slice(0, insertAt) + '\n' + entry + content.slice(insertAt);
694
- } else {
695
- content += '\n' + entry;
696
- }
697
- fs.writeFileSync(decPath, content);
712
+ const author = body.author || os.userInfo().username;
713
+ const slug = (body.title || 'note').toLowerCase().replace(/[^a-z0-9]+/g, '-').slice(0, 40);
714
+ const filename = `${author}-${slug}-${today}.md`;
715
+ const content = `# ${body.title}\n\n**By:** ${author}\n**Date:** ${today}\n\n${body.what}\n${body.why ? '\n**Why:** ' + body.why + '\n' : ''}`;
716
+ safeWrite(path.join(inboxDir, filename), content);
698
717
  return jsonReply(res, 200, { ok: true });
699
718
  } catch (e) { return jsonReply(res, 400, { error: e.message }); }
700
719
  }
@@ -703,6 +722,7 @@ const server = http.createServer(async (req, res) => {
703
722
  if (req.method === 'POST' && req.url === '/api/plan') {
704
723
  try {
705
724
  const body = await readBody(req);
725
+ if (!body.title || !body.title.trim()) return jsonReply(res, 400, { error: 'title is required' });
706
726
  // Write as a work item with type 'plan' — engine handles the chaining
707
727
  const wiPath = path.join(SQUAD_DIR, 'work-items.json');
708
728
  let items = [];
@@ -723,7 +743,7 @@ const server = http.createServer(async (req, res) => {
723
743
  if (body.project) item.project = body.project;
724
744
  if (body.agent) item.agent = body.agent;
725
745
  items.push(item);
726
- fs.writeFileSync(wiPath, JSON.stringify(items, null, 2));
746
+ safeWrite(wiPath, items);
727
747
  return jsonReply(res, 200, { ok: true, id, agent: body.agent || '' });
728
748
  } catch (e) { return jsonReply(res, 400, { error: e.message }); }
729
749
  }
@@ -732,6 +752,7 @@ const server = http.createServer(async (req, res) => {
732
752
  if (req.method === 'POST' && req.url === '/api/prd-items') {
733
753
  try {
734
754
  const body = await readBody(req);
755
+ if (!body.name || !body.name.trim()) return jsonReply(res, 400, { error: 'name is required' });
735
756
  const prdPath = path.join(SQUAD_DIR, 'prd.json');
736
757
  let data = { missing_features: [], existing_features: [], open_questions: [] };
737
758
  const existing = safeRead(prdPath);
@@ -743,7 +764,7 @@ const server = http.createServer(async (req, res) => {
743
764
  rationale: body.rationale || '', status: 'missing', affected_areas: [],
744
765
  projects: body.projects || [],
745
766
  });
746
- fs.writeFileSync(prdPath, JSON.stringify(data, null, 2));
767
+ safeWrite(prdPath, data);
747
768
  return jsonReply(res, 200, { ok: true, id: body.id });
748
769
  } catch (e) { return jsonReply(res, 400, { error: e.message }); }
749
770
  }
@@ -817,7 +838,7 @@ const server = http.createServer(async (req, res) => {
817
838
  } else {
818
839
  notes += '\n' + entry;
819
840
  }
820
- fs.writeFileSync(notesPath, notes);
841
+ safeWrite(notesPath, notes);
821
842
 
822
843
  // Move to archive
823
844
  const archiveDir = path.join(SQUAD_DIR, 'notes', 'archive');
@@ -56,28 +56,61 @@ if (!claudeBin) {
56
56
  const debugPath = path.join(__dirname, 'spawn-debug.log');
57
57
  fs.writeFileSync(debugPath, `spawn-agent.js at ${new Date().toISOString()}\nclaudeBin=${claudeBin || 'not found'}\nprompt=${promptFile}\nsysPrompt=${sysPromptFile}\nextraArgs=${extraArgs.join(' ')}\n`);
58
58
 
59
- const cliArgs = ['-p', '--system-prompt', sysPrompt, ...extraArgs];
59
+ // Pass system prompt via file to avoid ENAMETOOLONG on Windows (32KB arg limit)
60
+ // Write to a temp file and use shell-based workaround
61
+ const sysTmpPath = sysPromptFile + '.tmp';
62
+ fs.writeFileSync(sysTmpPath, sysPrompt);
63
+ const cliArgs = ['-p', '--system-prompt-file', sysTmpPath, ...extraArgs];
60
64
 
61
65
  if (!claudeBin) {
62
66
  fs.appendFileSync(debugPath, 'FATAL: Cannot find claude-code cli.js\n');
63
67
  process.exit(1);
64
68
  }
65
69
 
66
- const proc = spawn(process.execPath, [claudeBin, ...cliArgs], {
70
+ // Check if --system-prompt-file is supported by trying it; if not, fall back to inline
71
+ // but truncate to stay under Windows arg limit
72
+ let actualArgs = cliArgs;
73
+ try {
74
+ // Test: does claude support --system-prompt-file?
75
+ const testResult = require('child_process').spawnSync(process.execPath, [claudeBin, '--help'], { encoding: 'utf8', timeout: 5000 });
76
+ if (!(testResult.stdout || '').includes('system-prompt-file')) {
77
+ // Not supported — fall back to inline but safe: use --append-system-prompt with chunking
78
+ // or just inline if under 30KB
79
+ fs.unlinkSync(sysTmpPath);
80
+ if (Buffer.byteLength(sysPrompt) < 30000) {
81
+ actualArgs = ['-p', '--system-prompt', sysPrompt, ...extraArgs];
82
+ } else {
83
+ // Too large for inline — prepend system prompt to the user prompt via stdin
84
+ actualArgs = ['-p', ...extraArgs];
85
+ // We'll inject system prompt into stdin along with the prompt below
86
+ }
87
+ }
88
+ } catch {
89
+ // If help check fails, try file approach anyway
90
+ }
91
+
92
+ const proc = spawn(process.execPath, [claudeBin, ...actualArgs], {
67
93
  stdio: ['pipe', 'pipe', 'pipe'],
68
94
  env
69
95
  });
70
96
 
71
- fs.appendFileSync(debugPath, `PID=${proc.pid || 'none'}\n`);
97
+ fs.appendFileSync(debugPath, `PID=${proc.pid || 'none'}\nargs=${actualArgs.join(' ').slice(0, 500)}\n`);
72
98
 
73
99
  // Write PID file for parent engine to verify spawn
74
100
  const pidFile = promptFile.replace(/prompt-/, 'pid-').replace(/\.md$/, '.pid');
75
101
  fs.writeFileSync(pidFile, String(proc.pid || ''));
76
102
 
77
- // Send prompt via stdin
78
- proc.stdin.write(prompt);
103
+ // Send prompt via stdin — if system prompt couldn't be passed via args, prepend it
104
+ if (!actualArgs.includes('--system-prompt') && !actualArgs.includes('--system-prompt-file')) {
105
+ proc.stdin.write(`<system>\n${sysPrompt}\n</system>\n\n${prompt}`);
106
+ } else {
107
+ proc.stdin.write(prompt);
108
+ }
79
109
  proc.stdin.end();
80
110
 
111
+ // Clean up temp file
112
+ setTimeout(() => { try { fs.unlinkSync(sysTmpPath); } catch {} }, 5000);
113
+
81
114
  // Capture stderr separately for debugging
82
115
  let stderrBuf = '';
83
116
  proc.stderr.on('data', (chunk) => {
package/engine.js CHANGED
@@ -896,6 +896,7 @@ function completeDispatch(id, result = 'success', reason = '') {
896
896
  }
897
897
 
898
898
  // ─── Dependency Gate ─────────────────────────────────────────────────────────
899
+ // Returns: true (deps met), false (deps pending), 'failed' (dep failed — propagate)
899
900
  function areDependenciesMet(item, config) {
900
901
  const deps = item.depends_on;
901
902
  if (!deps || deps.length === 0) return true;
@@ -911,7 +912,12 @@ function areDependenciesMet(item, config) {
911
912
  const workItems = safeJson(wiPath) || [];
912
913
  for (const depId of deps) {
913
914
  const depItem = workItems.find(w => w.sourcePlan === sourcePlan && w.planItemId === depId);
914
- if (!depItem || depItem.status !== 'done') return false;
915
+ if (!depItem) {
916
+ log('warn', `Dependency ${depId} not found for ${item.id} (plan: ${sourcePlan}) — treating as unmet`);
917
+ return false;
918
+ }
919
+ if (depItem.status === 'failed') return 'failed'; // Propagate failure
920
+ if (depItem.status !== 'done') return false;
915
921
  }
916
922
  return true;
917
923
  }
@@ -948,6 +954,14 @@ function checkPlanCompletion(meta, config) {
948
954
  const planItems = workItems.filter(w => w.sourcePlan === planFile && w.planItemId !== 'PR');
949
955
  if (planItems.length === 0) return;
950
956
  if (!planItems.every(w => w.status === 'done')) return;
957
+
958
+ // Dedup guard: check if PR item already exists for this plan
959
+ const existingPrItem = workItems.find(w => w.sourcePlan === planFile && w.planItemId === 'PR');
960
+ if (existingPrItem) {
961
+ log('debug', `Plan ${planFile} already has PR item ${existingPrItem.id} — skipping`);
962
+ if (plan.status !== 'completed') { plan.status = 'completed'; plan.completedAt = ts(); safeWrite(path.join(PLANS_DIR, planFile), plan); }
963
+ return;
964
+ }
951
965
  log('info', `All ${planItems.length} items in plan ${planFile} completed — creating PR work item`);
952
966
  const maxNum = workItems.reduce((max, i) => {
953
967
  const m = (i.id || '').match(/(\d+)$/);
@@ -993,20 +1007,27 @@ function chainPlanToPrd(dispatchItem, meta, config) {
993
1007
  return;
994
1008
  }
995
1009
 
996
- // Find the plan file — look for recently created .md files in plans/
997
- const planFiles = fs.readdirSync(planDir)
998
- .filter(f => f.endsWith('.md'))
999
- .map(f => ({ name: f, mtime: fs.statSync(path.join(planDir, f)).mtimeMs }))
1000
- .sort((a, b) => b.mtime - a.mtime);
1001
-
1002
- // Use the most recently modified plan file (the one the plan agent just wrote)
1003
- const planFile = planFiles[0];
1004
- if (!planFile) {
1005
- log('warn', `Plan chaining: no .md plan files found in plans/ after task ${dispatchItem.id}`);
1006
- return;
1010
+ // Use the plan filename from dispatch meta (set during plan task creation)
1011
+ // Falls back to mtime-based detection if meta doesn't have it
1012
+ let planFileName = meta?.planFileName || meta?.item?._planFileName;
1013
+ if (planFileName && fs.existsSync(path.join(planDir, planFileName))) {
1014
+ // Exact match from meta — no guessing
1015
+ } else {
1016
+ // Fallback: find most recently modified .md file
1017
+ const planFiles = fs.readdirSync(planDir)
1018
+ .filter(f => f.endsWith('.md'))
1019
+ .map(f => ({ name: f, mtime: fs.statSync(path.join(planDir, f)).mtimeMs }))
1020
+ .sort((a, b) => b.mtime - a.mtime);
1021
+ planFileName = planFiles[0]?.name;
1022
+ if (!planFileName) {
1023
+ log('warn', `Plan chaining: no .md plan files found in plans/ after task ${dispatchItem.id}`);
1024
+ return;
1025
+ }
1026
+ log('info', `Plan chaining: using mtime fallback — found ${planFileName}`);
1007
1027
  }
1008
1028
 
1009
- const planPath = path.join(planDir, planFile.name);
1029
+ const planFile = { name: planFileName };
1030
+ const planPath = path.join(planDir, planFileName);
1010
1031
  let planContent;
1011
1032
  try { planContent = fs.readFileSync(planPath, 'utf8'); } catch (e) {
1012
1033
  log('error', `Plan chaining: failed to read plan file ${planFile.name}: ${e.message}`);
@@ -1165,6 +1186,10 @@ function updateWorkItemStatus(meta, status, reason) {
1165
1186
  const anySuccess = results.some(r => r.status === 'done');
1166
1187
  const allDone = target.fanOutAgents ? results.length >= target.fanOutAgents.length : false;
1167
1188
 
1189
+ // Timeout: if dispatched > 6 hours ago and not all agents reported, treat partial results as final
1190
+ const dispatchAge = target.dispatched_at ? Date.now() - new Date(target.dispatched_at).getTime() : 0;
1191
+ const timedOut = !allDone && dispatchAge > 6 * 60 * 60 * 1000 && results.length > 0;
1192
+
1168
1193
  if (anySuccess) {
1169
1194
  target.status = 'done';
1170
1195
  delete target.failReason;
@@ -1172,9 +1197,11 @@ function updateWorkItemStatus(meta, status, reason) {
1172
1197
  target.completedAgents = Object.entries(target.agentResults)
1173
1198
  .filter(([, r]) => r.status === 'done')
1174
1199
  .map(([a]) => a);
1175
- } else if (allDone) {
1200
+ } else if (allDone || timedOut) {
1176
1201
  target.status = 'failed';
1177
- target.failReason = 'All fan-out agents failed';
1202
+ target.failReason = timedOut
1203
+ ? `Fan-out timed out: ${results.length}/${(target.fanOutAgents || []).length} agents reported (all failed)`
1204
+ : 'All fan-out agents failed';
1178
1205
  target.failedAt = ts();
1179
1206
  }
1180
1207
  } else {
@@ -2628,6 +2655,26 @@ function runCleanup(config, verbose = false) {
2628
2655
  } catch {}
2629
2656
  }
2630
2657
 
2658
+ // Skip worktrees for active shared-branch plans
2659
+ if (shouldClean) {
2660
+ try {
2661
+ const planDir = path.join(SQUAD_DIR, 'plans');
2662
+ if (fs.existsSync(planDir)) {
2663
+ for (const pf of fs.readdirSync(planDir).filter(f => f.endsWith('.json'))) {
2664
+ const plan = safeJson(path.join(planDir, pf));
2665
+ if (plan?.branch_strategy === 'shared-branch' && plan?.feature_branch && plan?.status !== 'completed') {
2666
+ const planBranch = sanitizeBranch(plan.feature_branch);
2667
+ if (dir === planBranch || dir.includes(planBranch) || planBranch.includes(dir)) {
2668
+ shouldClean = false;
2669
+ if (verbose) console.log(` Skipping worktree ${dir}: active shared-branch plan`);
2670
+ break;
2671
+ }
2672
+ }
2673
+ }
2674
+ }
2675
+ } catch {}
2676
+ }
2677
+
2631
2678
  if (shouldClean) {
2632
2679
  try {
2633
2680
  execSync(`git worktree remove "${wtPath}" --force`, { cwd: root, stdio: 'pipe' });
@@ -2663,8 +2710,23 @@ function runCleanup(config, verbose = false) {
2663
2710
  // 5. Clean spawn-debug.log
2664
2711
  try { fs.unlinkSync(path.join(ENGINE_DIR, 'spawn-debug.log')); } catch {}
2665
2712
 
2666
- if (cleaned.tempFiles + cleaned.liveOutputs + cleaned.worktrees + cleaned.zombies > 0) {
2667
- log('info', `Cleanup: ${cleaned.tempFiles} temp files, ${cleaned.liveOutputs} live outputs, ${cleaned.worktrees} worktrees, ${cleaned.zombies} zombies`);
2713
+ // 6. Prune old output archive files (keep last 30 per agent)
2714
+ for (const agentId of Object.keys(config.agents || {})) {
2715
+ const agentDir = path.join(SQUAD_DIR, 'agents', agentId);
2716
+ if (!fs.existsSync(agentDir)) continue;
2717
+ try {
2718
+ const outputFiles = fs.readdirSync(agentDir)
2719
+ .filter(f => f.startsWith('output-') && f.endsWith('.log') && f !== 'output.log')
2720
+ .map(f => ({ name: f, mtime: fs.statSync(path.join(agentDir, f)).mtimeMs }))
2721
+ .sort((a, b) => b.mtime - a.mtime);
2722
+ for (const old of outputFiles.slice(30)) {
2723
+ try { fs.unlinkSync(path.join(agentDir, old.name)); cleaned.files++; } catch {}
2724
+ }
2725
+ } catch {}
2726
+ }
2727
+
2728
+ if (cleaned.tempFiles + cleaned.liveOutputs + cleaned.worktrees + cleaned.zombies + (cleaned.files || 0) > 0) {
2729
+ log('info', `Cleanup: ${cleaned.tempFiles} temp files, ${cleaned.liveOutputs} live outputs, ${cleaned.worktrees} worktrees, ${cleaned.zombies} zombies, ${cleaned.files || 0} old output archives`);
2668
2730
  }
2669
2731
 
2670
2732
  return cleaned;
@@ -2693,6 +2755,11 @@ function saveCooldowns() {
2693
2755
  if (_cooldownWritePending) return;
2694
2756
  _cooldownWritePending = true;
2695
2757
  setTimeout(() => {
2758
+ // Prune expired entries (>24h) before saving
2759
+ const now = Date.now();
2760
+ for (const [k, v] of dispatchCooldowns) {
2761
+ if (now - v.timestamp > 24 * 60 * 60 * 1000) dispatchCooldowns.delete(k);
2762
+ }
2696
2763
  const obj = Object.fromEntries(dispatchCooldowns);
2697
2764
  safeWrite(COOLDOWN_PATH, obj);
2698
2765
  _cooldownWritePending = false;
@@ -3187,8 +3254,17 @@ function discoverFromWorkItems(config, project) {
3187
3254
  for (const item of items) {
3188
3255
  if (item.status !== 'queued' && item.status !== 'pending') continue;
3189
3256
 
3190
- // Dependency gate: skip items whose depends_on are not yet met
3191
- if (item.depends_on && item.depends_on.length > 0 && !areDependenciesMet(item, config)) continue;
3257
+ // Dependency gate: skip items whose depends_on are not yet met; propagate failure
3258
+ if (item.depends_on && item.depends_on.length > 0) {
3259
+ const depStatus = areDependenciesMet(item, config);
3260
+ if (depStatus === 'failed') {
3261
+ item.status = 'failed';
3262
+ item.failReason = 'Dependency failed — cannot proceed';
3263
+ log('warn', `Marking ${item.id} as failed: dependency failed (plan: ${item.sourcePlan})`);
3264
+ continue;
3265
+ }
3266
+ if (!depStatus) continue;
3267
+ }
3192
3268
 
3193
3269
  const key = `work-${project?.name || 'default'}-${item.id}`;
3194
3270
  if (isAlreadyDispatched(key) || isOnCooldown(key, cooldownMs)) { skipped.gated++; continue; }
@@ -3593,6 +3669,8 @@ function discoverCentralWorkItems(config) {
3593
3669
  vars.task_description = item.title;
3594
3670
  vars.notes_content = '';
3595
3671
  try { vars.notes_content = fs.readFileSync(path.join(SQUAD_DIR, 'notes.md'), 'utf8'); } catch {}
3672
+ // Track expected plan filename in meta for chainPlanToPrd
3673
+ item._planFileName = planFileName;
3596
3674
  }
3597
3675
 
3598
3676
  // Inject ask-specific variables for the ask playbook
@@ -3615,7 +3693,7 @@ function discoverCentralWorkItems(config) {
3615
3693
  agentRole,
3616
3694
  task: item.title || item.description?.slice(0, 80) || item.id,
3617
3695
  prompt,
3618
- meta: { dispatchKey: key, source: 'central-work-item', item }
3696
+ meta: { dispatchKey: key, source: 'central-work-item', item, planFileName: item._planFileName || null }
3619
3697
  });
3620
3698
 
3621
3699
  item.status = 'dispatched';
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@yemi33/squad",
3
- "version": "0.1.9",
3
+ "version": "0.1.10",
4
4
  "description": "Multi-agent AI dev team that runs from ~/.squad/ — five autonomous agents share a single engine, dashboard, and knowledge base",
5
5
  "bin": {
6
6
  "squad": "bin/squad.js"
package/squad.js CHANGED
@@ -165,12 +165,6 @@ async function addProject(targetDir) {
165
165
  mainBranch,
166
166
  prUrlBase,
167
167
  workSources: {
168
- prd: {
169
- enabled: true,
170
- path: 'docs/prd-gaps.json',
171
- itemFilter: { status: ['missing', 'planned'] },
172
- cooldownMinutes: 30
173
- },
174
168
  pullRequests: {
175
169
  enabled: true,
176
170
  path: '.squad/pull-requests.json',
@@ -244,11 +238,174 @@ function listProjects() {
244
238
  }
245
239
  }
246
240
 
241
+ // ─── Scan & Multi-Select ─────────────────────────────────────────────────────
242
+
243
+ function findGitRepos(rootDir, maxDepth = 3) {
244
+ const repos = [];
245
+ const visited = new Set();
246
+
247
+ function walk(dir, depth) {
248
+ if (depth > maxDepth || visited.has(dir)) return;
249
+ visited.add(dir);
250
+ try {
251
+ // Skip common non-project dirs
252
+ const base = path.basename(dir);
253
+ if (['node_modules', '.git', '.hg', 'AppData', '$Recycle.Bin', 'Windows', 'Program Files',
254
+ 'Program Files (x86)', '.cache', '.npm', '.yarn', '.nuget', 'worktrees'].includes(base)) return;
255
+
256
+ const gitDir = path.join(dir, '.git');
257
+ if (fs.existsSync(gitDir)) {
258
+ repos.push(dir);
259
+ return; // Don't recurse into git repos (they may have nested submodules)
260
+ }
261
+
262
+ const entries = fs.readdirSync(dir, { withFileTypes: true });
263
+ for (const entry of entries) {
264
+ if (entry.isDirectory() || entry.isSymbolicLink()) {
265
+ walk(path.join(dir, entry.name), depth + 1);
266
+ }
267
+ }
268
+ } catch {} // permission errors, etc.
269
+ }
270
+
271
+ walk(rootDir, 0);
272
+ return repos;
273
+ }
274
+
275
+ async function scanAndAdd() {
276
+ const homeDir = process.env.USERPROFILE || process.env.HOME || '';
277
+ const scanRoot = rest[0] || homeDir;
278
+ const maxDepth = parseInt(rest[1]) || 3;
279
+
280
+ console.log(`\n Scanning for git repos in: ${scanRoot}`);
281
+ console.log(` Max depth: ${maxDepth}\n`);
282
+
283
+ const repos = findGitRepos(scanRoot, maxDepth);
284
+ if (repos.length === 0) {
285
+ console.log(' No git repositories found.\n');
286
+ rl.close();
287
+ return;
288
+ }
289
+
290
+ const config = loadConfig();
291
+ const linkedPaths = new Set((config.projects || []).map(p => path.resolve(p.localPath)));
292
+
293
+ // Enrich repos with auto-discovered metadata
294
+ const enriched = repos.map(repoPath => {
295
+ const detected = autoDiscover(repoPath);
296
+ const alreadyLinked = linkedPaths.has(path.resolve(repoPath));
297
+ return {
298
+ path: repoPath,
299
+ name: detected.name || detected.repoName || path.basename(repoPath),
300
+ host: detected.repoHost || '?',
301
+ org: detected.org || '',
302
+ project: detected.project || '',
303
+ repoName: detected.repoName || path.basename(repoPath),
304
+ mainBranch: detected.mainBranch || 'main',
305
+ description: detected.description || '',
306
+ linked: alreadyLinked,
307
+ };
308
+ });
309
+
310
+ console.log(` Found ${enriched.length} git repo(s):\n`);
311
+ enriched.forEach((r, i) => {
312
+ const tag = r.linked ? ' (already linked)' : '';
313
+ const hostTag = r.host === 'ado' ? 'ADO' : r.host === 'github' ? 'GitHub' : 'git';
314
+ console.log(` ${String(i + 1).padStart(3)}. ${r.name} [${hostTag}]${tag}`);
315
+ console.log(` ${r.path}`);
316
+ });
317
+
318
+ console.log('\n Enter numbers to add (comma-separated, ranges ok, e.g. "1,3,5-7")');
319
+ console.log(' Or "all" to add all unlinked repos, "q" to quit.\n');
320
+
321
+ const answer = await ask('Select repos', '');
322
+ if (!answer || answer.toLowerCase() === 'q') {
323
+ console.log(' Cancelled.\n');
324
+ rl.close();
325
+ return;
326
+ }
327
+
328
+ // Parse selection
329
+ let indices;
330
+ if (answer.toLowerCase() === 'all') {
331
+ indices = enriched.map((_, i) => i).filter(i => !enriched[i].linked);
332
+ } else {
333
+ indices = [];
334
+ for (const part of answer.split(',')) {
335
+ const trimmed = part.trim();
336
+ const rangeMatch = trimmed.match(/^(\d+)\s*-\s*(\d+)$/);
337
+ if (rangeMatch) {
338
+ const start = parseInt(rangeMatch[1]) - 1;
339
+ const end = parseInt(rangeMatch[2]) - 1;
340
+ for (let i = start; i <= end; i++) indices.push(i);
341
+ } else {
342
+ const n = parseInt(trimmed) - 1;
343
+ if (!isNaN(n)) indices.push(n);
344
+ }
345
+ }
346
+ }
347
+
348
+ // Filter valid, unlinked selections
349
+ const toAdd = [...new Set(indices)]
350
+ .filter(i => i >= 0 && i < enriched.length && !enriched[i].linked)
351
+ .map(i => enriched[i]);
352
+
353
+ if (toAdd.length === 0) {
354
+ console.log(' Nothing to add.\n');
355
+ rl.close();
356
+ return;
357
+ }
358
+
359
+ console.log(`\n Adding ${toAdd.length} project(s)...\n`);
360
+
361
+ for (const repo of toAdd) {
362
+ const prUrlBase = repo.host === 'github'
363
+ ? (repo.org && repo.repoName ? `https://github.com/${repo.org}/${repo.repoName}/pull/` : '')
364
+ : (repo.org && repo.project && repo.repoName
365
+ ? `https://${repo.org}.visualstudio.com/DefaultCollection/${repo.project}/_git/${repo.repoName}/pullrequest/`
366
+ : '');
367
+
368
+ const project = {
369
+ name: repo.name,
370
+ description: repo.description,
371
+ localPath: repo.path.replace(/\\/g, '/'),
372
+ repositoryId: '',
373
+ adoOrg: repo.org,
374
+ adoProject: repo.project,
375
+ repoName: repo.repoName,
376
+ mainBranch: repo.mainBranch,
377
+ prUrlBase,
378
+ workSources: {
379
+ pullRequests: { enabled: true, path: '.squad/pull-requests.json', cooldownMinutes: 30 },
380
+ workItems: { enabled: true, path: '.squad/work-items.json', cooldownMinutes: 0 },
381
+ specs: { enabled: true, filePatterns: ['docs/**/*.md'], statePath: '.squad/spec-tracker.json', lookbackDays: 7 },
382
+ }
383
+ };
384
+
385
+ config.projects.push(project);
386
+
387
+ // Create project-local state files
388
+ const squadDir = path.join(repo.path, '.squad');
389
+ if (!fs.existsSync(squadDir)) fs.mkdirSync(squadDir, { recursive: true });
390
+ for (const [f, content] of Object.entries({ 'pull-requests.json': '[]', 'work-items.json': '[]' })) {
391
+ const fp = path.join(squadDir, f);
392
+ if (!fs.existsSync(fp)) fs.writeFileSync(fp, content);
393
+ }
394
+
395
+ console.log(` + ${repo.name} (${repo.path})`);
396
+ }
397
+
398
+ saveConfig(config);
399
+ console.log(`\n Done. ${config.projects.length} total project(s) linked.`);
400
+ console.log(` Run "node squad.js list" to verify.\n`);
401
+ rl.close();
402
+ }
403
+
247
404
  // ─── CLI ─────────────────────────────────────────────────────────────────────
248
405
 
249
406
  const [cmd, ...rest] = process.argv.slice(2);
250
407
 
251
- function initSquad() {
408
+ async function initSquad() {
252
409
  const config = loadConfig();
253
410
  if (!config.projects) config.projects = [];
254
411
  if (!config.engine) config.engine = { tickInterval: 60000, staleThreshold: 1800000, maxConcurrent: 3, inboxConsolidateThreshold: 5, agentTimeout: 600000, maxTurns: 100 };
@@ -264,13 +421,15 @@ function initSquad() {
264
421
  }
265
422
  saveConfig(config);
266
423
  console.log(`\n Squad initialized at ${SQUAD_HOME}`);
267
- console.log(` Projects: ${config.projects.length}`);
268
- console.log(`\n Add a project:`);
269
- console.log(` node squad add <project-dir>\n`);
424
+ console.log(` Config, agents, and engine defaults created.\n`);
425
+
426
+ // Auto-chain into scan
427
+ console.log(' Now let\'s find your repos...\n');
428
+ await scanAndAdd();
270
429
  }
271
430
 
272
431
  const commands = {
273
- init: () => initSquad(),
432
+ init: () => initSquad().catch(e => { console.error(e); process.exit(1); }),
274
433
  add: () => {
275
434
  const dir = rest[0];
276
435
  if (!dir) { console.log('Usage: node squad add <project-dir>'); process.exit(1); }
@@ -282,6 +441,7 @@ const commands = {
282
441
  removeProject(dir);
283
442
  },
284
443
  list: () => listProjects(),
444
+ scan: () => scanAndAdd().catch(e => { console.error(e); process.exit(1); }),
285
445
  };
286
446
 
287
447
  if (cmd && commands[cmd]) {
@@ -291,7 +451,8 @@ if (cmd && commands[cmd]) {
291
451
  console.log(' Usage: node squad <command>\n');
292
452
  console.log(' Commands:');
293
453
  console.log(' init Initialize squad (no projects)');
294
- console.log(' add <project-dir> Link a project');
454
+ console.log(' scan [dir] [depth] Scan for git repos and multi-select to add');
455
+ console.log(' add <project-dir> Link a single project');
295
456
  console.log(' remove <project-dir> Unlink a project');
296
457
  console.log(' list List linked projects\n');
297
458
  console.log(' After init, also use:');