crewswarm 0.9.4 → 0.9.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.env.example +8 -1
- package/README.md +3 -3
- package/apps/dashboard/README.md +49 -0
- package/apps/dashboard/dist/index.html +2 -0
- package/install.sh +2 -2
- package/lib/crew-lead/wave-dispatcher.mjs +53 -3
- package/lib/crew-lead/worktree.mjs +258 -0
- package/lib/crew-lead/ws-router.mjs +43 -0
- package/lib/memory/relevance-scorer.mjs +199 -0
- package/lib/memory/shared-adapter.mjs +85 -19
- package/package.json +6 -1
- package/scripts/dashboard.mjs +183 -4
- package/scripts/install-docker.sh +1 -1
- package/scripts/start.mjs +46 -11
package/.env.example
CHANGED
|
@@ -159,7 +159,14 @@
|
|
|
159
159
|
# ── Ports ─────────────────────────────────────────────────────────────────────
|
|
160
160
|
# CREW_LEAD_PORT=5010 # crew-lead HTTP API
|
|
161
161
|
# SWARM_DASH_PORT=4319 # Dashboard web UI
|
|
162
|
-
# WA_HTTP_PORT=
|
|
162
|
+
# WA_HTTP_PORT=5015 # WhatsApp bridge HTTP
|
|
163
|
+
# STUDIO_PORT=3333 # Vibe IDE
|
|
164
|
+
|
|
165
|
+
# ── Runtime & Config ──────────────────────────────────────────────────────────
|
|
166
|
+
# CREWSWARM_DIR=$HOME/.crewswarm # Override config directory
|
|
167
|
+
# CREWSWARM_RT_AUTH_TOKEN= # RT bus auth token (auto-generated by install.sh)
|
|
168
|
+
# CREWSWARM_MAX_BRIDGES=10 # Max concurrent agent bridge processes
|
|
169
|
+
# CREWSWARM_BIND_HOST=127.0.0.1 # Host to bind all services to (use 0.0.0.0 for remote access)
|
|
163
170
|
|
|
164
171
|
# ── Execution Engines ─────────────────────────────────────────────────────────
|
|
165
172
|
# CREWSWARM_OPENCODE_ENABLED=off # Route coding agents through OpenCode
|
package/README.md
CHANGED
|
@@ -9,7 +9,7 @@
|
|
|
9
9
|
[](https://crewswarm.ai)
|
|
10
10
|
[](https://github.com/sponsors/crewswarm)
|
|
11
11
|
|
|
12
|
-

|
|
13
13
|
|
|
14
14
|
---
|
|
15
15
|
|
|
@@ -168,11 +168,11 @@ Or skip API keys entirely — use Claude Code, Cursor, or Gemini CLI with OAuth
|
|
|
168
168
|
## Commands
|
|
169
169
|
|
|
170
170
|
```bash
|
|
171
|
-
crewswarm # Start
|
|
171
|
+
crewswarm # Start full stack: RT bus, crew-lead, bridges, then dashboard
|
|
172
172
|
crewswarm pm-loop # Run autonomous PM loop
|
|
173
173
|
npm run doctor # Preflight check
|
|
174
174
|
npm run restart-all # Restart the stack
|
|
175
|
-
npm test #
|
|
175
|
+
npm test # 4,530 tests, 100% passing
|
|
176
176
|
npm run test:report # View test results summary
|
|
177
177
|
crew exec "Build X" # Send task via CLI
|
|
178
178
|
```
|
|
@@ -0,0 +1,49 @@
|
|
|
1
|
+
# crewswarm-dashboard
|
|
2
|
+
|
|
3
|
+
Real-time control panel for CrewSwarm. Vanilla JS + Vite, no framework dependencies.
|
|
4
|
+
|
|
5
|
+
## Development
|
|
6
|
+
|
|
7
|
+
```bash
|
|
8
|
+
cd apps/dashboard
|
|
9
|
+
npm install
|
|
10
|
+
npm run dev # http://localhost:5173
|
|
11
|
+
```
|
|
12
|
+
|
|
13
|
+
## Build
|
|
14
|
+
|
|
15
|
+
```bash
|
|
16
|
+
npm run build # outputs to dist/
|
|
17
|
+
npm run preview # preview production build
|
|
18
|
+
```
|
|
19
|
+
|
|
20
|
+
## Structure
|
|
21
|
+
|
|
22
|
+
```
|
|
23
|
+
src/
|
|
24
|
+
app.js # Main app entry, tab routing, SSE connections
|
|
25
|
+
styles.css # Global styles (dark theme)
|
|
26
|
+
chat/ # Chat tab (crew-lead conversation)
|
|
27
|
+
tabs/ # Tab modules (Build, Swarm, Agents, Engines, etc.)
|
|
28
|
+
components/ # Shared UI components
|
|
29
|
+
core/ # Core utilities (SSE, state, API client)
|
|
30
|
+
cli-process.js # CLI Process tab
|
|
31
|
+
setup-wizard.js # First-run setup wizard
|
|
32
|
+
orchestration-status.js # Pipeline status display
|
|
33
|
+
```
|
|
34
|
+
|
|
35
|
+
## Key tabs
|
|
36
|
+
|
|
37
|
+
- **Chat** -- Talk to crew-lead, dispatch tasks
|
|
38
|
+
- **Build** -- One-click build from a requirement
|
|
39
|
+
- **Swarm** -- Active sessions and agent activity
|
|
40
|
+
- **Agents** -- Configure sub-agents, models, permissions
|
|
41
|
+
- **Engines** -- Manage CLI engines (Claude Code, Codex, Gemini, Cursor)
|
|
42
|
+
- **RT Messages** -- Live message bus inspector
|
|
43
|
+
- **Services** -- Health status of all services
|
|
44
|
+
|
|
45
|
+
## Notes
|
|
46
|
+
|
|
47
|
+
- Connects to crew-lead at `http://localhost:5010` by default
|
|
48
|
+
- All state comes from SSE streams and REST API -- no local state management
|
|
49
|
+
- Brotli-compressed `.br` files are pre-built for production serving
|
|
@@ -5582,6 +5582,8 @@
|
|
|
5582
5582
|
<div id="testingContent">
|
|
5583
5583
|
<div class="meta" style="padding: 20px">Loading test results...</div>
|
|
5584
5584
|
</div>
|
|
5585
|
+
<div id="testingChart"></div>
|
|
5586
|
+
<div id="testingCoverage"></div>
|
|
5585
5587
|
<div id="testingHistory"></div>
|
|
5586
5588
|
</div>
|
|
5587
5589
|
|
package/install.sh
CHANGED
|
@@ -121,7 +121,7 @@ if [[ ! -f "$CONFIG_FILE" ]]; then
|
|
|
121
121
|
}
|
|
122
122
|
}
|
|
123
123
|
EOF
|
|
124
|
-
success "Created ~/.crewswarm/
|
|
124
|
+
success "Created ~/.crewswarm/config.json (RT token: $RT_TOKEN)"
|
|
125
125
|
else
|
|
126
126
|
success "~/.crewswarm/crewswarm.json already exists — keeping it"
|
|
127
127
|
fi
|
|
@@ -307,7 +307,7 @@ elif [[ "$SHELL" == *"bash"* ]]; then
|
|
|
307
307
|
SHELL_RC="$HOME/.bash_profile"
|
|
308
308
|
fi
|
|
309
309
|
|
|
310
|
-
BIN_ALIAS="alias crew-cli='node $REPO_DIR/crew-cli.mjs'"
|
|
310
|
+
BIN_ALIAS="alias crew-cli='node $REPO_DIR/crew-cli/dist/crew.mjs'"
|
|
311
311
|
if [[ -n "$SHELL_RC" ]] && ! grep -q "crew-cli" "$SHELL_RC" 2>/dev/null; then
|
|
312
312
|
echo "" >> "$SHELL_RC"
|
|
313
313
|
echo "# CrewSwarm" >> "$SHELL_RC"
|
|
@@ -13,6 +13,12 @@ import { normalizeProjectDir } from "../runtime/project-dir.mjs";
|
|
|
13
13
|
import { loadProjectMessages } from "../chat/project-messages.mjs";
|
|
14
14
|
import * as tmuxBridge from "../bridges/tmux-bridge.mjs";
|
|
15
15
|
import * as sessionManager from "../sessions/session-manager.mjs";
|
|
16
|
+
import {
|
|
17
|
+
isGitRepo,
|
|
18
|
+
createWorktree,
|
|
19
|
+
mergeWorktree,
|
|
20
|
+
cleanupPipelineWorktrees,
|
|
21
|
+
} from "./worktree.mjs";
|
|
16
22
|
|
|
17
23
|
let _deps = {};
|
|
18
24
|
|
|
@@ -295,6 +301,12 @@ export function cancelAllPipelines(sessionId) {
|
|
|
295
301
|
console.log(`[crew-lead] Cancelling pipeline ${pid} (${waveInfo}, ${pipeline.pendingTaskIds.size} pending tasks)`);
|
|
296
302
|
_deps.broadcastSSE?.({ type: "pipeline_cancelled", pipelineId: pid, ts: Date.now() });
|
|
297
303
|
deletePipelineState(pid);
|
|
304
|
+
// Clean up any active worktrees for this pipeline.
|
|
305
|
+
if (pipeline.projectDir) {
|
|
306
|
+
try { cleanupPipelineWorktrees(pipeline.projectDir, pid); } catch (e) {
|
|
307
|
+
console.warn(`[worktree] cleanup on cancel failed for ${pid}: ${e.message}`);
|
|
308
|
+
}
|
|
309
|
+
}
|
|
298
310
|
cancelled++;
|
|
299
311
|
}
|
|
300
312
|
pendingPipelines.clear();
|
|
@@ -425,12 +437,50 @@ export function dispatchPipelineWave(pipelineId) {
|
|
|
425
437
|
}
|
|
426
438
|
|
|
427
439
|
// ── Standard path (individual dispatch per agent) ───────────────────────
|
|
440
|
+
|
|
441
|
+
// ── Worktree isolation (multi-agent waves only) ──────────────────────────
|
|
442
|
+
// When CREWSWARM_WORKTREE_ISOLATION is not "false" (default: enabled for
|
|
443
|
+
// multi-agent waves) AND the pipeline has a projectDir that is a git repo,
|
|
444
|
+
// create an isolated worktree for each agent so parallel file writes don't
|
|
445
|
+
// conflict. Single-agent waves skip worktree overhead by default.
|
|
446
|
+
const worktreeEnabled = (() => {
|
|
447
|
+
const envVal = process.env.CREWSWARM_WORKTREE_ISOLATION;
|
|
448
|
+
if (envVal === "false" || envVal === "0") return false;
|
|
449
|
+
// Per-pipeline spec can also disable it.
|
|
450
|
+
if (pipeline.worktreeIsolation === false) return false;
|
|
451
|
+
return waveSteps.length > 1;
|
|
452
|
+
})();
|
|
453
|
+
|
|
454
|
+
if (!pipeline.worktrees) pipeline.worktrees = new Map();
|
|
455
|
+
|
|
456
|
+
if (worktreeEnabled && pipeline.projectDir) {
|
|
457
|
+
let repoConfirmed = false;
|
|
458
|
+
try { repoConfirmed = isGitRepo(pipeline.projectDir); } catch {}
|
|
459
|
+
|
|
460
|
+
if (repoConfirmed) {
|
|
461
|
+
console.log(`[worktree] pipeline ${pipelineId.slice(0, 8)} wave ${currentWave + 1}: creating worktrees for ${waveSteps.length} agent(s)`);
|
|
462
|
+
for (const step of waveSteps) {
|
|
463
|
+
const agentId = step.agent;
|
|
464
|
+
const wtPath = createWorktree(pipeline.projectDir, pipelineId, currentWave, agentId);
|
|
465
|
+
if (wtPath) {
|
|
466
|
+
pipeline.worktrees.set(agentId, { path: wtPath, waveIndex: currentWave });
|
|
467
|
+
}
|
|
468
|
+
}
|
|
469
|
+
}
|
|
470
|
+
}
|
|
471
|
+
|
|
428
472
|
for (const step of waveSteps) {
|
|
473
|
+
// Use per-agent worktree path if one was created, otherwise use the shared projectDir.
|
|
474
|
+
const agentWorktree = pipeline.worktrees?.get(step.agent);
|
|
475
|
+
const effectiveProjectDir = agentWorktree?.waveIndex === currentWave
|
|
476
|
+
? agentWorktree.path
|
|
477
|
+
: pipeline.projectDir;
|
|
478
|
+
|
|
429
479
|
let taskText = projectRootBanner + step.task + contextBlock;
|
|
430
480
|
// QA always writes to projectDir/qa-report.md so reports aren't random filenames
|
|
431
481
|
const isQa = step.agent === "crew-qa" || (step.agent && step.agent.includes("qa"));
|
|
432
|
-
if (isQa &&
|
|
433
|
-
taskText += `\n\nWrite your report to ${
|
|
482
|
+
if (isQa && effectiveProjectDir && !/qa-report\.md|Write your report to/i.test(taskText)) {
|
|
483
|
+
taskText += `\n\nWrite your report to ${effectiveProjectDir}/qa-report.md (no other filename).`;
|
|
434
484
|
}
|
|
435
485
|
const stepSpec = {
|
|
436
486
|
task: taskText,
|
|
@@ -445,7 +495,7 @@ export function dispatchPipelineWave(pipelineId) {
|
|
|
445
495
|
const taskId = dispatchTask(step.agent, stepSpec, sessionId, {
|
|
446
496
|
pipelineId,
|
|
447
497
|
waveIndex: currentWave,
|
|
448
|
-
projectDir:
|
|
498
|
+
projectDir: effectiveProjectDir,
|
|
449
499
|
originProjectId: pipeline.originProjectId,
|
|
450
500
|
originChannel: pipeline.originChannel,
|
|
451
501
|
originThreadId: pipeline.originThreadId,
|
|
@@ -0,0 +1,258 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Git worktree isolation helpers for parallel wave dispatch.
|
|
3
|
+
* Each agent in a multi-agent wave gets its own git worktree so they can't
|
|
4
|
+
* conflict with each other on the filesystem.
|
|
5
|
+
*
|
|
6
|
+
* All operations are wrapped in try/catch — if git fails, callers fall back
|
|
7
|
+
* to the shared directory.
|
|
8
|
+
*/
|
|
9
|
+
|
|
10
|
+
import { execSync } from "node:child_process";
|
|
11
|
+
import path from "node:path";
|
|
12
|
+
import fs from "node:fs";
|
|
13
|
+
|
|
14
|
+
// ── Naming helpers ───────────────────────────────────────────────────────────
|
|
15
|
+
|
|
16
|
+
/**
|
|
17
|
+
* Return the deterministic worktree path for an agent in a pipeline wave.
|
|
18
|
+
* Format: /tmp/crewswarm-wt-{pipelineId.slice(0,8)}-{agentId}
|
|
19
|
+
*/
|
|
20
|
+
export function worktreePath(pipelineId, agentId) {
|
|
21
|
+
return `/tmp/crewswarm-wt-${pipelineId.slice(0, 8)}-${agentId}`;
|
|
22
|
+
}
|
|
23
|
+
|
|
24
|
+
/**
|
|
25
|
+
* Return the deterministic branch name for an agent in a pipeline wave.
|
|
26
|
+
* Format: crewswarm/wave-{pipelineId.slice(0,8)}-{agentId}
|
|
27
|
+
*/
|
|
28
|
+
export function worktreeBranch(pipelineId, agentId) {
|
|
29
|
+
return `crewswarm/wave-${pipelineId.slice(0, 8)}-${agentId}`;
|
|
30
|
+
}
|
|
31
|
+
|
|
32
|
+
// ── Core helpers ─────────────────────────────────────────────────────────────
|
|
33
|
+
|
|
34
|
+
/**
|
|
35
|
+
* Check if a directory is inside a git repository.
|
|
36
|
+
* Returns true if git reports it is inside a work tree, false otherwise.
|
|
37
|
+
*/
|
|
38
|
+
export function isGitRepo(dir) {
|
|
39
|
+
try {
|
|
40
|
+
const result = execSync("git rev-parse --is-inside-work-tree", {
|
|
41
|
+
cwd: dir,
|
|
42
|
+
encoding: "utf8",
|
|
43
|
+
stdio: ["pipe", "pipe", "pipe"],
|
|
44
|
+
timeout: 5000,
|
|
45
|
+
}).trim();
|
|
46
|
+
return result === "true";
|
|
47
|
+
} catch {
|
|
48
|
+
return false;
|
|
49
|
+
}
|
|
50
|
+
}
|
|
51
|
+
|
|
52
|
+
/**
|
|
53
|
+
* Create a git worktree for an agent's wave task.
|
|
54
|
+
*
|
|
55
|
+
* @param {string} projectDir - The shared project directory (must be a git repo).
|
|
56
|
+
* @param {string} pipelineId - The pipeline ID (used for naming).
|
|
57
|
+
* @param {number} waveIndex - The zero-based wave index (informational, used in logs).
|
|
58
|
+
* @param {string} agentId - The agent ID (used for naming).
|
|
59
|
+
* @returns {string|null} The worktree path, or null if git isn't available or
|
|
60
|
+
* projectDir isn't a git repo.
|
|
61
|
+
*/
|
|
62
|
+
export function createWorktree(projectDir, pipelineId, waveIndex, agentId) {
|
|
63
|
+
try {
|
|
64
|
+
if (!projectDir || !isGitRepo(projectDir)) {
|
|
65
|
+
console.log(`[worktree] ${agentId}: projectDir is not a git repo — skipping worktree`);
|
|
66
|
+
return null;
|
|
67
|
+
}
|
|
68
|
+
|
|
69
|
+
const wtPath = worktreePath(pipelineId, agentId);
|
|
70
|
+
const branch = worktreeBranch(pipelineId, agentId);
|
|
71
|
+
|
|
72
|
+
// Remove stale worktree at the same path if it exists (e.g. crashed previous run).
|
|
73
|
+
if (fs.existsSync(wtPath)) {
|
|
74
|
+
console.log(`[worktree] ${agentId}: stale worktree found at ${wtPath} — removing`);
|
|
75
|
+
try {
|
|
76
|
+
execSync(`git worktree remove --force "${wtPath}"`, {
|
|
77
|
+
cwd: projectDir, encoding: "utf8", stdio: ["pipe", "pipe", "pipe"], timeout: 10000,
|
|
78
|
+
});
|
|
79
|
+
} catch {
|
|
80
|
+
// If git worktree remove fails, try cleaning up the directory directly.
|
|
81
|
+
try { fs.rmSync(wtPath, { recursive: true, force: true }); } catch {}
|
|
82
|
+
}
|
|
83
|
+
// Also delete the branch if it was left dangling.
|
|
84
|
+
try {
|
|
85
|
+
execSync(`git branch -D "${branch}"`, {
|
|
86
|
+
cwd: projectDir, encoding: "utf8", stdio: ["pipe", "pipe", "pipe"], timeout: 5000,
|
|
87
|
+
});
|
|
88
|
+
} catch {}
|
|
89
|
+
}
|
|
90
|
+
|
|
91
|
+
// Create the worktree on a new branch forked from the current HEAD.
|
|
92
|
+
execSync(`git worktree add -b "${branch}" "${wtPath}" HEAD`, {
|
|
93
|
+
cwd: projectDir,
|
|
94
|
+
encoding: "utf8",
|
|
95
|
+
stdio: ["pipe", "pipe", "pipe"],
|
|
96
|
+
timeout: 15000,
|
|
97
|
+
});
|
|
98
|
+
|
|
99
|
+
console.log(`[worktree] wave ${waveIndex + 1} ${agentId}: created worktree at ${wtPath} (branch: ${branch})`);
|
|
100
|
+
return wtPath;
|
|
101
|
+
} catch (e) {
|
|
102
|
+
console.error(`[worktree] ${agentId}: failed to create worktree — ${e.message}`);
|
|
103
|
+
return null;
|
|
104
|
+
}
|
|
105
|
+
}
|
|
106
|
+
|
|
107
|
+
/**
|
|
108
|
+
* Merge a worktree branch back into the current branch (usually main/HEAD) and
|
|
109
|
+
* clean up the worktree + branch.
|
|
110
|
+
*
|
|
111
|
+
* @param {string} projectDir - The shared project directory.
|
|
112
|
+
* @param {string} pipelineId - The pipeline ID.
|
|
113
|
+
* @param {number} waveIndex - The zero-based wave index (informational).
|
|
114
|
+
* @param {string} agentId - The agent ID.
|
|
115
|
+
* @returns {{ ok: boolean, conflicts?: string[], merged_files?: string[] }}
|
|
116
|
+
*/
|
|
117
|
+
export function mergeWorktree(projectDir, pipelineId, waveIndex, agentId) {
|
|
118
|
+
const wtPath = worktreePath(pipelineId, agentId);
|
|
119
|
+
const branch = worktreeBranch(pipelineId, agentId);
|
|
120
|
+
|
|
121
|
+
// If the worktree path doesn't even exist, nothing to do.
|
|
122
|
+
if (!fs.existsSync(wtPath)) {
|
|
123
|
+
console.log(`[worktree] ${agentId}: worktree at ${wtPath} not found — skipping merge`);
|
|
124
|
+
return { ok: true, merged_files: [] };
|
|
125
|
+
}
|
|
126
|
+
|
|
127
|
+
try {
|
|
128
|
+
// Collect files that changed in the worktree branch vs the shared repo HEAD
|
|
129
|
+
// so we can report them even if the merge is a no-op.
|
|
130
|
+
let mergedFiles = [];
|
|
131
|
+
try {
|
|
132
|
+
const diffOutput = execSync(`git diff --name-only HEAD "${branch}"`, {
|
|
133
|
+
cwd: projectDir,
|
|
134
|
+
encoding: "utf8",
|
|
135
|
+
stdio: ["pipe", "pipe", "pipe"],
|
|
136
|
+
timeout: 10000,
|
|
137
|
+
}).trim();
|
|
138
|
+
mergedFiles = diffOutput ? diffOutput.split("\n").filter(Boolean) : [];
|
|
139
|
+
} catch {}
|
|
140
|
+
|
|
141
|
+
// Perform the merge (--no-ff to keep history readable).
|
|
142
|
+
execSync(`git merge --no-ff -m "crewswarm: merge wave ${waveIndex + 1} ${agentId}" "${branch}"`, {
|
|
143
|
+
cwd: projectDir,
|
|
144
|
+
encoding: "utf8",
|
|
145
|
+
stdio: ["pipe", "pipe", "pipe"],
|
|
146
|
+
timeout: 30000,
|
|
147
|
+
});
|
|
148
|
+
|
|
149
|
+
console.log(`[worktree] wave ${waveIndex + 1} ${agentId}: merged ${mergedFiles.length} file(s) from ${branch}`);
|
|
150
|
+
_cleanupWorktree(projectDir, wtPath, branch);
|
|
151
|
+
return { ok: true, merged_files: mergedFiles };
|
|
152
|
+
} catch (e) {
|
|
153
|
+
// Check if it's a merge conflict.
|
|
154
|
+
const isConflict = /CONFLICT|Automatic merge failed/i.test(e.message || "");
|
|
155
|
+
if (isConflict) {
|
|
156
|
+
// Collect conflict file names.
|
|
157
|
+
let conflicts = [];
|
|
158
|
+
try {
|
|
159
|
+
const conflictOutput = execSync("git diff --name-only --diff-filter=U", {
|
|
160
|
+
cwd: projectDir,
|
|
161
|
+
encoding: "utf8",
|
|
162
|
+
stdio: ["pipe", "pipe", "pipe"],
|
|
163
|
+
timeout: 5000,
|
|
164
|
+
}).trim();
|
|
165
|
+
conflicts = conflictOutput ? conflictOutput.split("\n").filter(Boolean) : [];
|
|
166
|
+
} catch {}
|
|
167
|
+
|
|
168
|
+
// Abort the merge so the repo stays clean.
|
|
169
|
+
try {
|
|
170
|
+
execSync("git merge --abort", {
|
|
171
|
+
cwd: projectDir,
|
|
172
|
+
encoding: "utf8",
|
|
173
|
+
stdio: ["pipe", "pipe", "pipe"],
|
|
174
|
+
timeout: 10000,
|
|
175
|
+
});
|
|
176
|
+
} catch {}
|
|
177
|
+
|
|
178
|
+
console.error(`[worktree] wave ${waveIndex + 1} ${agentId}: merge conflicts in ${conflicts.length} file(s): ${conflicts.join(", ")}`);
|
|
179
|
+
_cleanupWorktree(projectDir, wtPath, branch);
|
|
180
|
+
return { ok: false, conflicts };
|
|
181
|
+
}
|
|
182
|
+
|
|
183
|
+
// Other error — still attempt cleanup.
|
|
184
|
+
console.error(`[worktree] ${agentId}: merge failed — ${e.message}`);
|
|
185
|
+
_cleanupWorktree(projectDir, wtPath, branch);
|
|
186
|
+
return { ok: false, conflicts: [] };
|
|
187
|
+
}
|
|
188
|
+
}
|
|
189
|
+
|
|
190
|
+
/**
|
|
191
|
+
* Clean up all worktrees for a pipeline (called on pipeline completion or cancellation).
|
|
192
|
+
*
|
|
193
|
+
* @param {string} projectDir - The shared project directory.
|
|
194
|
+
* @param {string} pipelineId - The pipeline ID whose worktrees should be removed.
|
|
195
|
+
*/
|
|
196
|
+
export function cleanupPipelineWorktrees(projectDir, pipelineId) {
|
|
197
|
+
const prefix = `/tmp/crewswarm-wt-${pipelineId.slice(0, 8)}-`;
|
|
198
|
+
const branchPrefix = `crewswarm/wave-${pipelineId.slice(0, 8)}-`;
|
|
199
|
+
|
|
200
|
+
// Find all matching worktree paths under /tmp.
|
|
201
|
+
let wtDirs = [];
|
|
202
|
+
try {
|
|
203
|
+
wtDirs = fs.readdirSync("/tmp")
|
|
204
|
+
.filter(name => name.startsWith(`crewswarm-wt-${pipelineId.slice(0, 8)}-`))
|
|
205
|
+
.map(name => path.join("/tmp", name));
|
|
206
|
+
} catch {}
|
|
207
|
+
|
|
208
|
+
for (const wtPath of wtDirs) {
|
|
209
|
+
// Derive the agentId from the path suffix after the pipeline prefix.
|
|
210
|
+
const agentId = wtPath.slice(prefix.length);
|
|
211
|
+
const branch = `${branchPrefix}${agentId}`;
|
|
212
|
+
_cleanupWorktree(projectDir, wtPath, branch);
|
|
213
|
+
}
|
|
214
|
+
|
|
215
|
+
if (wtDirs.length > 0) {
|
|
216
|
+
console.log(`[worktree] pipeline ${pipelineId.slice(0, 8)}: cleaned up ${wtDirs.length} worktree(s)`);
|
|
217
|
+
}
|
|
218
|
+
}
|
|
219
|
+
|
|
220
|
+
// ── Internal helpers ─────────────────────────────────────────────────────────
|
|
221
|
+
|
|
222
|
+
/**
|
|
223
|
+
* Remove a worktree directory and delete its tracking branch.
|
|
224
|
+
* Silently ignores errors so callers always continue.
|
|
225
|
+
*/
|
|
226
|
+
function _cleanupWorktree(projectDir, wtPath, branch) {
|
|
227
|
+
// git worktree remove
|
|
228
|
+
if (fs.existsSync(wtPath)) {
|
|
229
|
+
try {
|
|
230
|
+
execSync(`git worktree remove --force "${wtPath}"`, {
|
|
231
|
+
cwd: projectDir,
|
|
232
|
+
encoding: "utf8",
|
|
233
|
+
stdio: ["pipe", "pipe", "pipe"],
|
|
234
|
+
timeout: 10000,
|
|
235
|
+
});
|
|
236
|
+
console.log(`[worktree] removed worktree at ${wtPath}`);
|
|
237
|
+
} catch (e) {
|
|
238
|
+
// Last resort: rm -rf the directory.
|
|
239
|
+
console.warn(`[worktree] git worktree remove failed for ${wtPath} — ${e.message}; falling back to rm`);
|
|
240
|
+
try { fs.rmSync(wtPath, { recursive: true, force: true }); } catch {}
|
|
241
|
+
}
|
|
242
|
+
}
|
|
243
|
+
|
|
244
|
+
// Delete the branch.
|
|
245
|
+
if (projectDir && branch) {
|
|
246
|
+
try {
|
|
247
|
+
execSync(`git branch -D "${branch}"`, {
|
|
248
|
+
cwd: projectDir,
|
|
249
|
+
encoding: "utf8",
|
|
250
|
+
stdio: ["pipe", "pipe", "pipe"],
|
|
251
|
+
timeout: 5000,
|
|
252
|
+
});
|
|
253
|
+
console.log(`[worktree] deleted branch ${branch}`);
|
|
254
|
+
} catch {
|
|
255
|
+
// Branch may already be gone — that's fine.
|
|
256
|
+
}
|
|
257
|
+
}
|
|
258
|
+
}
|
|
@@ -3,6 +3,7 @@ import path from "node:path";
|
|
|
3
3
|
import os from "node:os";
|
|
4
4
|
import fs from "node:fs";
|
|
5
5
|
import { applyProjectDirToPipelineSteps } from "../dispatch/parsers.mjs";
|
|
6
|
+
import { mergeWorktree } from "./worktree.mjs";
|
|
6
7
|
|
|
7
8
|
let reconnectTimer = null;
|
|
8
9
|
let isConnecting = false;
|
|
@@ -482,6 +483,48 @@ export function initWsRouter(deps) {
|
|
|
482
483
|
if (pipeline.pendingTaskIds.size === 0) {
|
|
483
484
|
if (!pipeline.completedWaveResults) pipeline.completedWaveResults = [];
|
|
484
485
|
pipeline.completedWaveResults.push([...pipeline.waveResults]);
|
|
486
|
+
|
|
487
|
+
// ── Merge worktrees back into the shared branch ───────────
|
|
488
|
+
// If this wave used per-agent worktrees, merge them now that
|
|
489
|
+
// all agents have finished. Report any conflicts via SSE.
|
|
490
|
+
if (pipeline.worktrees?.size > 0 && pipeline.projectDir) {
|
|
491
|
+
const waveIdx = pipeline.currentWave;
|
|
492
|
+
const mergeResults = [];
|
|
493
|
+
const allConflicts = [];
|
|
494
|
+
for (const [agentId, wtMeta] of pipeline.worktrees) {
|
|
495
|
+
if (wtMeta.waveIndex !== waveIdx) continue;
|
|
496
|
+
try {
|
|
497
|
+
const result = mergeWorktree(pipeline.projectDir, dispatch.pipelineId, waveIdx, agentId);
|
|
498
|
+
mergeResults.push({ agentId, ...result });
|
|
499
|
+
if (!result.ok && result.conflicts?.length) {
|
|
500
|
+
allConflicts.push(...result.conflicts.map(f => `${agentId}:${f}`));
|
|
501
|
+
}
|
|
502
|
+
} catch (e) {
|
|
503
|
+
console.warn(`[worktree] merge failed for ${agentId}: ${e.message}`);
|
|
504
|
+
mergeResults.push({ agentId, ok: false, conflicts: [] });
|
|
505
|
+
}
|
|
506
|
+
pipeline.worktrees.delete(agentId);
|
|
507
|
+
}
|
|
508
|
+
if (mergeResults.length > 0) {
|
|
509
|
+
broadcastSSE?.({
|
|
510
|
+
type: "pipeline_worktree_merged",
|
|
511
|
+
pipelineId: dispatch.pipelineId,
|
|
512
|
+
waveIndex: waveIdx,
|
|
513
|
+
results: mergeResults,
|
|
514
|
+
conflicts: allConflicts,
|
|
515
|
+
ts: Date.now(),
|
|
516
|
+
});
|
|
517
|
+
if (allConflicts.length > 0) {
|
|
518
|
+
appendHistory?.(
|
|
519
|
+
"default",
|
|
520
|
+
pipeline.sessionId || "owner",
|
|
521
|
+
"system",
|
|
522
|
+
`Pipeline wave ${waveIdx + 1} worktree merge had ${allConflicts.length} conflict(s): ${allConflicts.join(", ")}. Manual resolution may be required.`,
|
|
523
|
+
);
|
|
524
|
+
}
|
|
525
|
+
}
|
|
526
|
+
}
|
|
527
|
+
|
|
485
528
|
const gateResult = checkWaveQualityGate(pipeline, dispatch.pipelineId);
|
|
486
529
|
if (gateResult.pass) {
|
|
487
530
|
pipeline.currentWave++;
|
|
@@ -0,0 +1,199 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Memory Relevance Scorer
|
|
3
|
+
*
|
|
4
|
+
* Score memories by relevance to a query using:
|
|
5
|
+
* 1. Recency — newer memories score higher (exponential decay, ~30-day half-life)
|
|
6
|
+
* 2. Frequency — memories accessed more often score higher (normalised log)
|
|
7
|
+
* 3. Keyword — TF-IDF-like scoring against query terms (inverse-length weighting)
|
|
8
|
+
* 4. Context — memories from the same project/agent/session score higher
|
|
9
|
+
*
|
|
10
|
+
* Pure functions only — zero I/O, zero dependencies.
|
|
11
|
+
*/
|
|
12
|
+
|
|
13
|
+
// ─── Internal helpers ────────────────────────────────────────────────────────
|
|
14
|
+
|
|
15
|
+
/**
|
|
16
|
+
* Tokenise a string into lowercase alpha-numeric tokens of length >= 2.
|
|
17
|
+
* @param {string} text
|
|
18
|
+
* @returns {string[]}
|
|
19
|
+
*/
|
|
20
|
+
function tokenise(text) {
|
|
21
|
+
if (!text || typeof text !== 'string') return [];
|
|
22
|
+
return text
|
|
23
|
+
.toLowerCase()
|
|
24
|
+
.replace(/[^a-z0-9\s_-]/g, ' ')
|
|
25
|
+
.split(/\s+/)
|
|
26
|
+
.filter(t => t.length >= 2);
|
|
27
|
+
}
|
|
28
|
+
|
|
29
|
+
// ─── Individual scoring components ──────────────────────────────────────────
|
|
30
|
+
|
|
31
|
+
/**
|
|
32
|
+
* Recency score: exponential decay with ~30-day half-life.
|
|
33
|
+
* Returns 1.0 for brand-new memories, approaching 0 for very old ones.
|
|
34
|
+
*
|
|
35
|
+
* @param {string|number|Date} timestamp - ISO string, epoch ms, or Date
|
|
36
|
+
* @param {number} nowMs - current epoch ms (injectable for testing)
|
|
37
|
+
* @returns {number} [0, 1]
|
|
38
|
+
*/
|
|
39
|
+
export function computeRecency(timestamp, nowMs = Date.now()) {
|
|
40
|
+
if (timestamp == null) return 0;
|
|
41
|
+
const createdAt = timestamp instanceof Date
|
|
42
|
+
? timestamp.getTime()
|
|
43
|
+
: new Date(timestamp).getTime();
|
|
44
|
+
if (Number.isNaN(createdAt)) return 0;
|
|
45
|
+
const daysSince = Math.max(0, (nowMs - createdAt) / (1000 * 60 * 60 * 24));
|
|
46
|
+
return Math.exp(-daysSince / 30);
|
|
47
|
+
}
|
|
48
|
+
|
|
49
|
+
/**
|
|
50
|
+
* Frequency score: log-normalised access count relative to a max.
|
|
51
|
+
* Both accessCount and maxAccessCount must be >= 0.
|
|
52
|
+
*
|
|
53
|
+
* @param {number} accessCount
|
|
54
|
+
* @param {number} maxAccessCount - upper bound for normalisation (default 100)
|
|
55
|
+
* @returns {number} [0, 1]
|
|
56
|
+
*/
|
|
57
|
+
export function computeFrequency(accessCount, maxAccessCount = 100) {
|
|
58
|
+
const count = Math.max(0, Number(accessCount) || 0);
|
|
59
|
+
const maxCount = Math.max(1, Number(maxAccessCount) || 100);
|
|
60
|
+
return Math.min(1, Math.log(1 + count) / Math.log(1 + maxCount));
|
|
61
|
+
}
|
|
62
|
+
|
|
63
|
+
/**
|
|
64
|
+
* Keyword match score: TF-IDF-like overlap between query tokens and memory content.
|
|
65
|
+
* Rarer (longer) query words are weighted more heavily.
|
|
66
|
+
*
|
|
67
|
+
* @param {string} content - memory content
|
|
68
|
+
* @param {string} query
|
|
69
|
+
* @returns {number} [0, 1]
|
|
70
|
+
*/
|
|
71
|
+
export function computeKeywordMatch(content, query) {
|
|
72
|
+
const queryTokens = tokenise(query);
|
|
73
|
+
const contentTokens = tokenise(content);
|
|
74
|
+
|
|
75
|
+
if (queryTokens.length === 0 || contentTokens.length === 0) return 0;
|
|
76
|
+
|
|
77
|
+
const contentSet = new Set(contentTokens);
|
|
78
|
+
|
|
79
|
+
// Weight each query token by its length (longer words are more specific)
|
|
80
|
+
let weightedMatch = 0;
|
|
81
|
+
let totalWeight = 0;
|
|
82
|
+
|
|
83
|
+
for (const token of queryTokens) {
|
|
84
|
+
// IDF proxy: weight proportional to token length (longer = rarer heuristic)
|
|
85
|
+
const weight = Math.log(1 + token.length);
|
|
86
|
+
totalWeight += weight;
|
|
87
|
+
if (contentSet.has(token)) {
|
|
88
|
+
weightedMatch += weight;
|
|
89
|
+
}
|
|
90
|
+
}
|
|
91
|
+
|
|
92
|
+
if (totalWeight === 0) return 0;
|
|
93
|
+
return weightedMatch / totalWeight;
|
|
94
|
+
}
|
|
95
|
+
|
|
96
|
+
/**
|
|
97
|
+
* Context match score: bonus points for shared project / agent / session.
|
|
98
|
+
*
|
|
99
|
+
* @param {object} memory - memory object with optional projectId, agentId, sessionId
|
|
100
|
+
* @param {object} context - { projectId?, agentId?, sessionId? }
|
|
101
|
+
* @returns {number} [0, 1]
|
|
102
|
+
*/
|
|
103
|
+
export function computeContextMatch(memory, context = {}) {
|
|
104
|
+
if (!memory || !context) return 0;
|
|
105
|
+
|
|
106
|
+
let score = 0;
|
|
107
|
+
|
|
108
|
+
if (context.projectId && memory.projectId &&
|
|
109
|
+
context.projectId === memory.projectId) {
|
|
110
|
+
score += 0.5;
|
|
111
|
+
}
|
|
112
|
+
|
|
113
|
+
if (context.agentId && memory.agentId &&
|
|
114
|
+
context.agentId === memory.agentId) {
|
|
115
|
+
score += 0.3;
|
|
116
|
+
}
|
|
117
|
+
|
|
118
|
+
if (context.sessionId && memory.sessionId &&
|
|
119
|
+
context.sessionId === memory.sessionId) {
|
|
120
|
+
score += 0.2;
|
|
121
|
+
}
|
|
122
|
+
|
|
123
|
+
// Cap at 1.0
|
|
124
|
+
return Math.min(1, score);
|
|
125
|
+
}
|
|
126
|
+
|
|
127
|
+
// ─── Public API ──────────────────────────────────────────────────────────────
|
|
128
|
+
|
|
129
|
+
/**
|
|
130
|
+
* Score a single memory object for relevance to a query + context.
|
|
131
|
+
*
|
|
132
|
+
* Expected memory shape (all fields optional except content):
|
|
133
|
+
* {
|
|
134
|
+
* content: string,
|
|
135
|
+
* timestamp: string|number|Date, // ISO or epoch ms
|
|
136
|
+
* accessCount: number,
|
|
137
|
+
* projectId: string,
|
|
138
|
+
* agentId: string,
|
|
139
|
+
* sessionId: string,
|
|
140
|
+
* }
|
|
141
|
+
*
|
|
142
|
+
* @param {object} memory
|
|
143
|
+
* @param {string} query
|
|
144
|
+
* @param {object} [context] - { projectId?, agentId?, sessionId? }
|
|
145
|
+
* @param {object} [opts]
|
|
146
|
+
* @param {number} [opts.nowMs] - override current time (for testing)
|
|
147
|
+
* @param {number} [opts.maxAccessCount] - normalisation ceiling for frequency
|
|
148
|
+
* @returns {number} weighted relevance score in [0, 1]
|
|
149
|
+
*/
|
|
150
|
+
export function scoreMemory(memory, query, context = {}, opts = {}) {
|
|
151
|
+
if (!memory) return 0;
|
|
152
|
+
|
|
153
|
+
const nowMs = opts.nowMs != null ? opts.nowMs : Date.now();
|
|
154
|
+
const maxAccessCount = opts.maxAccessCount != null ? opts.maxAccessCount : 100;
|
|
155
|
+
|
|
156
|
+
const recencyScore = computeRecency(memory.timestamp, nowMs);
|
|
157
|
+
const frequencyScore = computeFrequency(memory.accessCount || 0, maxAccessCount);
|
|
158
|
+
const keywordScore = computeKeywordMatch(memory.content || '', query);
|
|
159
|
+
const contextScore = computeContextMatch(memory, context);
|
|
160
|
+
|
|
161
|
+
return (
|
|
162
|
+
0.30 * recencyScore +
|
|
163
|
+
0.20 * frequencyScore +
|
|
164
|
+
0.35 * keywordScore +
|
|
165
|
+
0.15 * contextScore
|
|
166
|
+
);
|
|
167
|
+
}
|
|
168
|
+
|
|
169
|
+
/**
 * Rank an array of memories by relevance and return the top N.
 * Attaches a `relevanceScore` property to each returned object.
 *
 * @param {object[]} memories
 * @param {string} query
 * @param {object} [context] - { projectId?, agentId?, sessionId? }
 * @param {number} [maxResults=10]
 * @param {object} [opts] - forwarded to scoreMemory
 * @returns {object[]} sorted slice with relevanceScore attached
 */
export function rankMemories(memories, query, context = {}, maxResults = 10, opts = {}) {
  // Nothing to rank — also covers non-array input defensively.
  if (!Array.isArray(memories) || memories.length === 0) return [];

  // Score each memory into a fresh object; originals are never mutated.
  const scored = memories.map((memory) => ({
    ...memory,
    relevanceScore: scoreMemory(memory, query, context, opts),
  }));

  // Highest score first. `scored` is our own copy, so in-place sort is safe.
  scored.sort((left, right) => right.relevanceScore - left.relevanceScore);

  // Guard against maxResults <= 0: always return at least one result.
  const limit = maxResults < 1 ? 1 : maxResults;
  return scored.slice(0, limit);
}
|
|
188
|
+
|
|
189
|
+
/**
 * Derive the largest accessCount present in a collection of memories.
 * Useful for caller-side normalisation when passing opts.maxAccessCount.
 *
 * @param {object[]} memories
 * @returns {number} highest accessCount seen, or 0 for empty/non-array input
 */
export function maxAccessCount(memories) {
  // Defensive: non-array or empty input normalises to 0.
  if (!Array.isArray(memories) || memories.length === 0) return 0;

  let highest = 0;
  for (const memory of memories) {
    // Treat missing/undefined accessCount as 0.
    const count = memory.accessCount || 0;
    if (count > highest) highest = count;
  }
  return highest;
}
|
|
@@ -12,6 +12,7 @@ import fs from 'node:fs';
|
|
|
12
12
|
import path from 'node:path';
|
|
13
13
|
import os from 'node:os';
|
|
14
14
|
import { createRequire } from 'node:module';
|
|
15
|
+
import { rankMemories, maxAccessCount } from './relevance-scorer.mjs';
|
|
15
16
|
|
|
16
17
|
const require = createRequire(import.meta.url);
|
|
17
18
|
|
|
@@ -124,41 +125,106 @@ export function rememberFact(agentId, content, options = {}) {
|
|
|
124
125
|
|
|
125
126
|
/**
|
|
126
127
|
* Recall memory context for a task using MemoryBroker (blends AgentKeeper + AgentMemory + Collections).
|
|
128
|
+
* ENHANCED: Applies relevance scoring (recency + frequency + keyword + context) and tracks access metadata.
|
|
127
129
|
* ENHANCED: Also includes relevant conversation history from project messages.
|
|
128
130
|
* @param {string} projectDir - Project directory
|
|
129
131
|
* @param {string} query - Task description or search query
|
|
130
|
-
* @param {object} options - { maxResults?, includeDocs?, includeCode?, pathHints?, preferSuccessful?, userId?, projectId? }
|
|
132
|
+
* @param {object} options - { maxResults?, includeDocs?, includeCode?, pathHints?, preferSuccessful?, userId?, projectId?, agentId?, sessionId? }
|
|
131
133
|
* @returns {Promise<string>} - Formatted context block
|
|
132
134
|
*/
|
|
133
135
|
export async function recallMemoryContext(projectDir, query, options = {}) {
|
|
134
136
|
const broker = getMemoryBroker(projectDir, { crewId: options.crewId || 'crew-lead' });
|
|
135
|
-
|
|
137
|
+
|
|
136
138
|
let memoryContext = '';
|
|
137
|
-
|
|
139
|
+
|
|
138
140
|
// Get standard memory (AgentKeeper + AgentMemory + Collections)
|
|
139
|
-
//
|
|
141
|
+
// Fetch a larger candidate set so the relevance ranker has room to reorder
|
|
140
142
|
if (broker) {
|
|
141
|
-
|
|
142
|
-
|
|
143
|
-
|
|
144
|
-
|
|
145
|
-
|
|
146
|
-
|
|
147
|
-
|
|
148
|
-
|
|
149
|
-
|
|
150
|
-
|
|
151
|
-
|
|
152
|
-
|
|
143
|
+
const maxResults = options.maxResults || 5;
|
|
144
|
+
const candidateLimit = Math.max(maxResults * 3, 15);
|
|
145
|
+
|
|
146
|
+
// Pull raw structured hits when available, fall back to formatted context
|
|
147
|
+
let rawHits = null;
|
|
148
|
+
if (typeof broker.recall === 'function') {
|
|
149
|
+
try {
|
|
150
|
+
rawHits = await broker.recall(query, {
|
|
151
|
+
maxResults: candidateLimit,
|
|
152
|
+
includeDocs: options.includeDocs !== false,
|
|
153
|
+
includeCode: Boolean(options.includeCode),
|
|
154
|
+
pathHints: options.pathHints || [],
|
|
155
|
+
preferSuccessful: options.preferSuccessful !== false,
|
|
156
|
+
minScore: 0.7,
|
|
157
|
+
excludeFailed: true,
|
|
158
|
+
excludeErrors: true,
|
|
159
|
+
excludeTimeouts: true
|
|
160
|
+
});
|
|
161
|
+
} catch {
|
|
162
|
+
rawHits = null;
|
|
163
|
+
}
|
|
164
|
+
}
|
|
165
|
+
|
|
166
|
+
if (rawHits && Array.isArray(rawHits) && rawHits.length > 0) {
|
|
167
|
+
// Build a relevance context for the ranker
|
|
168
|
+
const scoringContext = {
|
|
169
|
+
projectId: options.projectId,
|
|
170
|
+
agentId: options.agentId,
|
|
171
|
+
sessionId: options.sessionId
|
|
172
|
+
};
|
|
173
|
+
|
|
174
|
+
// Normalise hits to the shape scoreMemory expects
|
|
175
|
+
const now = Date.now();
|
|
176
|
+
const normalised = rawHits.map(hit => ({
|
|
177
|
+
...hit,
|
|
178
|
+
content: hit.text || hit.content || '',
|
|
179
|
+
timestamp: hit.metadata?.timestamp || hit.timestamp || new Date(now - 86400000).toISOString(),
|
|
180
|
+
accessCount: (hit.accessCount || 0) + 1, // count this retrieval
|
|
181
|
+
lastAccessed: new Date(now).toISOString(),
|
|
182
|
+
projectId: hit.metadata?.projectId || hit.projectId,
|
|
183
|
+
agentId: hit.metadata?.agentId || hit.agentId,
|
|
184
|
+
sessionId: hit.metadata?.sessionId || hit.sessionId
|
|
185
|
+
}));
|
|
186
|
+
|
|
187
|
+
const scoringOpts = { nowMs: now, maxAccessCount: maxAccessCount(normalised) };
|
|
188
|
+
const ranked = rankMemories(normalised, query, scoringContext, maxResults, scoringOpts);
|
|
189
|
+
|
|
190
|
+
// Re-serialise to context string (mirrors broker.recallAsContext format)
|
|
191
|
+
if (ranked.length > 0) {
|
|
192
|
+
const lines = ranked.map((hit, i) => {
|
|
193
|
+
const score = hit.relevanceScore.toFixed(3);
|
|
194
|
+
const source = hit.source || hit.metadata?.source || 'memory';
|
|
195
|
+
const title = hit.title || hit.metadata?.title || `Result ${i + 1}`;
|
|
196
|
+
return `[${source}] ${title} (relevance: ${score})\n${hit.content}`;
|
|
197
|
+
});
|
|
198
|
+
memoryContext = lines.join('\n\n---\n\n');
|
|
199
|
+
}
|
|
200
|
+
} else {
|
|
201
|
+
// Fallback: broker doesn't expose raw hits — use formatted context as-is
|
|
202
|
+
try {
|
|
203
|
+
memoryContext = await broker.recallAsContext(query, {
|
|
204
|
+
maxResults: options.maxResults || 5,
|
|
205
|
+
includeDocs: options.includeDocs !== false,
|
|
206
|
+
includeCode: Boolean(options.includeCode),
|
|
207
|
+
pathHints: options.pathHints || [],
|
|
208
|
+
preferSuccessful: options.preferSuccessful !== false,
|
|
209
|
+
// Quality filters to prevent context contamination
|
|
210
|
+
minScore: 0.7,
|
|
211
|
+
excludeFailed: true,
|
|
212
|
+
excludeErrors: true,
|
|
213
|
+
excludeTimeouts: true
|
|
214
|
+
});
|
|
215
|
+
} catch {
|
|
216
|
+
memoryContext = '';
|
|
217
|
+
}
|
|
218
|
+
}
|
|
153
219
|
}
|
|
154
|
-
|
|
220
|
+
|
|
155
221
|
// ENHANCEMENT: Add relevant conversation history from project messages
|
|
156
222
|
// This lets agents see past discussions about the same topic
|
|
157
223
|
if (options.projectId) {
|
|
158
224
|
try {
|
|
159
225
|
const ragModule = await getProjectMessagesRag();
|
|
160
226
|
const conversationContext = ragModule?.getConversationContext(options.projectId, query, 3);
|
|
161
|
-
|
|
227
|
+
|
|
162
228
|
if (conversationContext) {
|
|
163
229
|
memoryContext += (memoryContext ? '\n\n' : '') + conversationContext;
|
|
164
230
|
}
|
|
@@ -167,7 +233,7 @@ export async function recallMemoryContext(projectDir, query, options = {}) {
|
|
|
167
233
|
console.warn('[shared-adapter] Failed to load conversation context:', e.message);
|
|
168
234
|
}
|
|
169
235
|
}
|
|
170
|
-
|
|
236
|
+
|
|
171
237
|
return memoryContext;
|
|
172
238
|
}
|
|
173
239
|
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "crewswarm",
|
|
3
|
-
"version": "0.9.
|
|
3
|
+
"version": "0.9.5",
|
|
4
4
|
"description": "Local-first multi-agent orchestration platform — coordinate AI coding agents, LLMs, and tools from a single dashboard",
|
|
5
5
|
"type": "module",
|
|
6
6
|
"license": "MIT",
|
|
@@ -8,6 +8,10 @@
|
|
|
8
8
|
"type": "git",
|
|
9
9
|
"url": "https://github.com/crewswarm/crewswarm.git"
|
|
10
10
|
},
|
|
11
|
+
"homepage": "https://crewswarm.ai",
|
|
12
|
+
"bugs": {
|
|
13
|
+
"url": "https://github.com/crewswarm/crewswarm/issues"
|
|
14
|
+
},
|
|
11
15
|
"keywords": [
|
|
12
16
|
"ai",
|
|
13
17
|
"agents",
|
|
@@ -110,6 +114,7 @@
|
|
|
110
114
|
"vibe:start": "cd apps/vibe && NODE_DISABLE_COMPILE_CACHE=1 npm start",
|
|
111
115
|
"vibe:watch": "NODE_DISABLE_COMPILE_CACHE=1 node apps/vibe/watch-server.mjs",
|
|
112
116
|
"vibe:full": "bash scripts/start-studio-full.sh",
|
|
117
|
+
"test:playwright": "npx playwright test --reporter=line",
|
|
113
118
|
"test:e2e:vibe": "node node_modules/playwright/cli.js test --config=playwright.config.js",
|
|
114
119
|
"test:e2e:vibe:headed": "node node_modules/playwright/cli.js test --config=playwright.config.js --headed",
|
|
115
120
|
"crew-lead": "node crew-lead.mjs",
|
package/scripts/dashboard.mjs
CHANGED
|
@@ -1831,8 +1831,13 @@ const server = http.createServer(async (req, res) => {
|
|
|
1831
1831
|
req.on("data", (c) => (body += c));
|
|
1832
1832
|
req.on("end", async () => {
|
|
1833
1833
|
let suite = "test:unit";
|
|
1834
|
-
|
|
1835
|
-
|
|
1834
|
+
let singleFile = null;
|
|
1835
|
+
try {
|
|
1836
|
+
const parsed = JSON.parse(body);
|
|
1837
|
+
suite = parsed.suite || suite;
|
|
1838
|
+
singleFile = parsed.file || null;
|
|
1839
|
+
} catch { /* default */ }
|
|
1840
|
+
const allowed = ["test:unit", "test:integration", "test:e2e", "test:all", "test", "test:e2e:vibe", "test:playwright"];
|
|
1836
1841
|
if (!allowed.includes(suite)) {
|
|
1837
1842
|
res.writeHead(400, { "content-type": "application/json" });
|
|
1838
1843
|
res.end(JSON.stringify({ error: "Invalid suite: " + suite }));
|
|
@@ -1842,9 +1847,16 @@ const server = http.createServer(async (req, res) => {
|
|
|
1842
1847
|
const progressFile = path.join(CREWSWARM_DIR, "test-results", ".test-progress.json");
|
|
1843
1848
|
const outputFile = path.join(CREWSWARM_DIR, "test-results", ".test-output.log");
|
|
1844
1849
|
// Write initial progress
|
|
1845
|
-
await fs.promises.writeFile(progressFile, JSON.stringify({ suite, running: true, pid: 0, started: Date.now(), passed: 0, failed: 0, skipped: 0, files_done: 0, current_file: "" }));
|
|
1850
|
+
await fs.promises.writeFile(progressFile, JSON.stringify({ suite, running: true, pid: 0, started: Date.now(), passed: 0, failed: 0, skipped: 0, files_done: 0, current_file: singleFile || "" }));
|
|
1851
|
+
let child;
|
|
1846
1852
|
const outFd = fs.openSync(outputFile, "w");
|
|
1847
|
-
|
|
1853
|
+
if (singleFile) {
|
|
1854
|
+
// Single-file run: use node --test directly on the file
|
|
1855
|
+
const absFile = path.isAbsolute(singleFile) ? singleFile : path.join(CREWSWARM_DIR, singleFile);
|
|
1856
|
+
child = spawn("node", ["--test", "--test-reporter=./scripts/test-reporter.mjs", absFile], { cwd: CREWSWARM_DIR, stdio: ["ignore", outFd, outFd], detached: true });
|
|
1857
|
+
} else {
|
|
1858
|
+
child = spawn("npm", ["run", suite], { cwd: CREWSWARM_DIR, stdio: ["ignore", outFd, outFd], detached: true });
|
|
1859
|
+
}
|
|
1848
1860
|
child.unref();
|
|
1849
1861
|
fs.closeSync(outFd);
|
|
1850
1862
|
// Update progress by tailing the output file
|
|
@@ -1898,6 +1910,173 @@ const server = http.createServer(async (req, res) => {
|
|
|
1898
1910
|
return;
|
|
1899
1911
|
}
|
|
1900
1912
|
|
|
1913
|
+
// ── GET /api/tests/stale — files changed since last test run ────────────
|
|
1914
|
+
if (url.pathname === "/api/tests/stale" && req.method === "GET") {
|
|
1915
|
+
const resultsDir = path.join(CREWSWARM_DIR, "test-results");
|
|
1916
|
+
const logPath = path.join(resultsDir, "test-log.jsonl");
|
|
1917
|
+
try {
|
|
1918
|
+
// Read last-run fingerprints from test-log.jsonl
|
|
1919
|
+
const fingerprintByFile = new Map();
|
|
1920
|
+
try {
|
|
1921
|
+
const lines = (await fs.promises.readFile(logPath, "utf8")).split("\n").filter(Boolean);
|
|
1922
|
+
for (const line of lines) {
|
|
1923
|
+
try {
|
|
1924
|
+
const entry = JSON.parse(line);
|
|
1925
|
+
if (entry.file && entry.file_fingerprint?.mtime) {
|
|
1926
|
+
// Keep the most recent entry per file
|
|
1927
|
+
fingerprintByFile.set(entry.file, entry.file_fingerprint);
|
|
1928
|
+
}
|
|
1929
|
+
} catch {}
|
|
1930
|
+
}
|
|
1931
|
+
} catch {}
|
|
1932
|
+
const stale = [];
|
|
1933
|
+
for (const [filePath, fp] of fingerprintByFile) {
|
|
1934
|
+
try {
|
|
1935
|
+
const stat = await fs.promises.stat(filePath);
|
|
1936
|
+
const currentMtime = stat.mtime.toISOString();
|
|
1937
|
+
if (currentMtime > fp.mtime) {
|
|
1938
|
+
stale.push({
|
|
1939
|
+
file: filePath.replace(CREWSWARM_DIR + "/", ""),
|
|
1940
|
+
lastRun: fp.mtime,
|
|
1941
|
+
lastModified: currentMtime,
|
|
1942
|
+
});
|
|
1943
|
+
}
|
|
1944
|
+
} catch {}
|
|
1945
|
+
}
|
|
1946
|
+
res.writeHead(200, { "content-type": "application/json" });
|
|
1947
|
+
res.end(JSON.stringify({ stale }));
|
|
1948
|
+
} catch (e) {
|
|
1949
|
+
res.writeHead(200, { "content-type": "application/json" });
|
|
1950
|
+
res.end(JSON.stringify({ stale: [], error: e.message }));
|
|
1951
|
+
}
|
|
1952
|
+
return;
|
|
1953
|
+
}
|
|
1954
|
+
|
|
1955
|
+
// ── GET /api/tests/stream — SSE stream of running test output ───────────
|
|
1956
|
+
if (url.pathname === "/api/tests/stream" && req.method === "GET") {
|
|
1957
|
+
const outputFile = path.join(CREWSWARM_DIR, "test-results", ".test-output.log");
|
|
1958
|
+
res.writeHead(200, {
|
|
1959
|
+
"content-type": "text/event-stream",
|
|
1960
|
+
"cache-control": "no-cache",
|
|
1961
|
+
"connection": "keep-alive",
|
|
1962
|
+
"access-control-allow-origin": "*",
|
|
1963
|
+
});
|
|
1964
|
+
let lastSize = 0;
|
|
1965
|
+
// Send existing content first
|
|
1966
|
+
try {
|
|
1967
|
+
const content = await fs.promises.readFile(outputFile, "utf8");
|
|
1968
|
+
if (content) {
|
|
1969
|
+
res.write("data: " + JSON.stringify({ text: content, reset: true }) + "\n\n");
|
|
1970
|
+
lastSize = Buffer.byteLength(content, "utf8");
|
|
1971
|
+
}
|
|
1972
|
+
} catch {}
|
|
1973
|
+
const streamInterval = setInterval(async () => {
|
|
1974
|
+
try {
|
|
1975
|
+
const stat = await fs.promises.stat(outputFile);
|
|
1976
|
+
if (stat.size > lastSize) {
|
|
1977
|
+
const fd = await fs.promises.open(outputFile, "r");
|
|
1978
|
+
const newBytes = stat.size - lastSize;
|
|
1979
|
+
const buf = Buffer.alloc(newBytes);
|
|
1980
|
+
await fd.read(buf, 0, newBytes, lastSize);
|
|
1981
|
+
await fd.close();
|
|
1982
|
+
const text = buf.toString("utf8");
|
|
1983
|
+
lastSize = stat.size;
|
|
1984
|
+
res.write("data: " + JSON.stringify({ text }) + "\n\n");
|
|
1985
|
+
}
|
|
1986
|
+
// Check if done
|
|
1987
|
+
const progressFile = path.join(CREWSWARM_DIR, "test-results", ".test-progress.json");
|
|
1988
|
+
try {
|
|
1989
|
+
const prog = JSON.parse(await fs.promises.readFile(progressFile, "utf8"));
|
|
1990
|
+
if (!prog.running && prog.finished) {
|
|
1991
|
+
res.write("data: " + JSON.stringify({ done: true }) + "\n\n");
|
|
1992
|
+
clearInterval(streamInterval);
|
|
1993
|
+
res.end();
|
|
1994
|
+
}
|
|
1995
|
+
} catch {}
|
|
1996
|
+
} catch {}
|
|
1997
|
+
}, 500);
|
|
1998
|
+
req.on("close", () => { clearInterval(streamInterval); });
|
|
1999
|
+
return;
|
|
2000
|
+
}
|
|
2001
|
+
|
|
2002
|
+
// ── GET /api/tests/screenshot — serve a Playwright failure screenshot ────
|
|
2003
|
+
if (url.pathname === "/api/tests/screenshot" && req.method === "GET") {
|
|
2004
|
+
const relPath = url.searchParams.get("path");
|
|
2005
|
+
if (!relPath) { res.writeHead(400); res.end("Missing path"); return; }
|
|
2006
|
+
// Security: only allow paths within test-results/
|
|
2007
|
+
const safePath = path.normalize(relPath).replace(/^(\.\.(\/|\\|$))+/, "");
|
|
2008
|
+
const absPath = path.join(CREWSWARM_DIR, "test-results", safePath);
|
|
2009
|
+
if (!absPath.startsWith(path.join(CREWSWARM_DIR, "test-results"))) {
|
|
2010
|
+
res.writeHead(403); res.end("Forbidden"); return;
|
|
2011
|
+
}
|
|
2012
|
+
try {
|
|
2013
|
+
const stat = await fs.promises.stat(absPath);
|
|
2014
|
+
const ext = path.extname(absPath).toLowerCase();
|
|
2015
|
+
const mimeMap = { ".png": "image/png", ".jpg": "image/jpeg", ".jpeg": "image/jpeg", ".webp": "image/webp" };
|
|
2016
|
+
const mime = mimeMap[ext] || "application/octet-stream";
|
|
2017
|
+
res.writeHead(200, { "content-type": mime, "content-length": stat.size, "cache-control": "max-age=300" });
|
|
2018
|
+
fs.createReadStream(absPath).pipe(res);
|
|
2019
|
+
} catch {
|
|
2020
|
+
res.writeHead(404); res.end("Screenshot not found");
|
|
2021
|
+
}
|
|
2022
|
+
return;
|
|
2023
|
+
}
|
|
2024
|
+
|
|
2025
|
+
// ── GET /api/tests/coverage-map — source files vs test coverage ──────────
|
|
2026
|
+
if (url.pathname === "/api/tests/coverage-map" && req.method === "GET") {
|
|
2027
|
+
try {
|
|
2028
|
+
const libDir = path.join(CREWSWARM_DIR, "lib");
|
|
2029
|
+
const crewCliSrcDir = path.join(CREWSWARM_DIR, "crew-cli", "src");
|
|
2030
|
+
const unitTestDir = path.join(CREWSWARM_DIR, "test", "unit");
|
|
2031
|
+
const crewCliTestDir = path.join(CREWSWARM_DIR, "crew-cli", "tests", "unit");
|
|
2032
|
+
|
|
2033
|
+
// Collect test file basenames (strip extension)
|
|
2034
|
+
const testBases = new Set();
|
|
2035
|
+
async function collectTestBases(dir) {
|
|
2036
|
+
try {
|
|
2037
|
+
for (const ent of await fs.promises.readdir(dir, { withFileTypes: true })) {
|
|
2038
|
+
if (ent.isDirectory()) { await collectTestBases(path.join(dir, ent.name)); continue; }
|
|
2039
|
+
if (ent.name.match(/\.test\.(mjs|js|ts)$/)) {
|
|
2040
|
+
// e.g. crew-judge.test.mjs -> crew-judge
|
|
2041
|
+
testBases.add(ent.name.replace(/\.test\.(mjs|js|ts)$/, ""));
|
|
2042
|
+
}
|
|
2043
|
+
}
|
|
2044
|
+
} catch {}
|
|
2045
|
+
}
|
|
2046
|
+
await collectTestBases(unitTestDir);
|
|
2047
|
+
await collectTestBases(crewCliTestDir);
|
|
2048
|
+
|
|
2049
|
+
// Collect source files and check coverage
|
|
2050
|
+
const covered = [];
|
|
2051
|
+
const uncovered = [];
|
|
2052
|
+
async function scanSourceDir(dir, prefix) {
|
|
2053
|
+
try {
|
|
2054
|
+
for (const ent of await fs.promises.readdir(dir, { withFileTypes: true })) {
|
|
2055
|
+
const fullPath = path.join(dir, ent.name);
|
|
2056
|
+
const relPath = prefix + "/" + ent.name;
|
|
2057
|
+
if (ent.isDirectory()) { await scanSourceDir(fullPath, relPath); continue; }
|
|
2058
|
+
if (!ent.name.match(/\.(mjs|js|ts)$/) || ent.name.includes(".d.ts") || ent.name.includes(".test.")) continue;
|
|
2059
|
+
const base = ent.name.replace(/\.(mjs|js|ts)$/, "");
|
|
2060
|
+
// Check if any test file matches by base name (fuzzy: contains or equals)
|
|
2061
|
+
const hasCoverage = [...testBases].some(tb => tb === base || tb.includes(base) || base.includes(tb));
|
|
2062
|
+
const entry = { file: relPath, base };
|
|
2063
|
+
if (hasCoverage) covered.push(entry);
|
|
2064
|
+
else uncovered.push(entry);
|
|
2065
|
+
}
|
|
2066
|
+
} catch {}
|
|
2067
|
+
}
|
|
2068
|
+
await scanSourceDir(libDir, "lib");
|
|
2069
|
+
await scanSourceDir(crewCliSrcDir, "crew-cli/src");
|
|
2070
|
+
|
|
2071
|
+
res.writeHead(200, { "content-type": "application/json" });
|
|
2072
|
+
res.end(JSON.stringify({ covered, uncovered, totalCovered: covered.length, totalUncovered: uncovered.length }));
|
|
2073
|
+
} catch (e) {
|
|
2074
|
+
res.writeHead(200, { "content-type": "application/json" });
|
|
2075
|
+
res.end(JSON.stringify({ covered: [], uncovered: [], error: e.message }));
|
|
2076
|
+
}
|
|
2077
|
+
return;
|
|
2078
|
+
}
|
|
2079
|
+
|
|
1901
2080
|
// ── First-run detection ──────────────────────────────────────────────────
|
|
1902
2081
|
if (url.pathname === "/api/first-run-status" && req.method === "GET") {
|
|
1903
2082
|
const cfg = readSwarmConfigSafe();
|
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
#!/usr/bin/env bash
|
|
2
2
|
# crewswarm Docker Installer — one-line setup for cloud VMs and dedicated servers
|
|
3
|
-
# Usage: curl -fsSL https://raw.githubusercontent.com/
|
|
3
|
+
# Usage: curl -fsSL https://raw.githubusercontent.com/crewswarm/crewswarm/main/scripts/install-docker.sh | bash
|
|
4
4
|
|
|
5
5
|
set -e
|
|
6
6
|
|
package/scripts/start.mjs
CHANGED
|
@@ -2,24 +2,29 @@
|
|
|
2
2
|
/**
|
|
3
3
|
* scripts/start.mjs — CrewSwarm first-run entry point
|
|
4
4
|
*
|
|
5
|
-
* This is the script behind `npm start`. It validates the
|
|
6
|
-
* handing off to the real stack so that a brand-new user
|
|
7
|
-
* and types `npm start` gets clear, actionable guidance
|
|
8
|
-
* trace.
|
|
5
|
+
* This is the script behind `npm start` / `npx crewswarm`. It validates the
|
|
6
|
+
* environment before handing off to the real stack so that a brand-new user
|
|
7
|
+
* who clones the repo and types `npm start` gets clear, actionable guidance
|
|
8
|
+
* rather than a stack trace.
|
|
9
9
|
*
|
|
10
10
|
* Checks performed (in order):
|
|
11
11
|
* 1. Node.js version >= 20
|
|
12
12
|
* 2. ~/.crewswarm/crewswarm.json exists (created by install.sh)
|
|
13
|
-
* 3. ~/.crewswarm/
|
|
13
|
+
* 3. ~/.crewswarm/config.json exists (created by install.sh)
|
|
14
14
|
* 4. At least one provider with an apiKey is configured
|
|
15
15
|
*
|
|
16
|
-
* On success
|
|
16
|
+
* On success:
|
|
17
|
+
* 1. Spawns start-crew.mjs in the background (RT bus + crew-lead + bridges)
|
|
18
|
+
* 2. Waits 3 seconds for services to come up
|
|
19
|
+
* 3. Starts dashboard in the foreground (the process the user sees)
|
|
20
|
+
*
|
|
21
|
+
* NOTE: `npm run dashboard` still starts only the dashboard (unchanged).
|
|
17
22
|
*/
|
|
18
23
|
|
|
19
24
|
import fs from "node:fs";
|
|
20
25
|
import path from "node:path";
|
|
21
26
|
import os from "node:os";
|
|
22
|
-
import { spawnSync } from "node:child_process";
|
|
27
|
+
import { spawn, spawnSync } from "node:child_process";
|
|
23
28
|
import { fileURLToPath } from "node:url";
|
|
24
29
|
|
|
25
30
|
const __dirname = path.dirname(fileURLToPath(import.meta.url));
|
|
@@ -63,7 +68,7 @@ success(`Node.js v${process.versions.node}`);
|
|
|
63
68
|
// ── 2. Config directory ───────────────────────────────────────────────────────
|
|
64
69
|
const CREWSWARM_DIR = path.join(os.homedir(), ".crewswarm");
|
|
65
70
|
const SWARM_CFG = path.join(CREWSWARM_DIR, "crewswarm.json");
|
|
66
|
-
const SYS_CFG = path.join(CREWSWARM_DIR, "
|
|
71
|
+
const SYS_CFG = path.join(CREWSWARM_DIR, "config.json");
|
|
67
72
|
const INSTALL_SH = path.join(ROOT, "install.sh");
|
|
68
73
|
|
|
69
74
|
function tryReadJSON(p) {
|
|
@@ -130,14 +135,44 @@ if (agents.length === 0) {
|
|
|
130
135
|
}
|
|
131
136
|
info(`${agents.length} agent(s) defined`);
|
|
132
137
|
|
|
133
|
-
// ── 7.
|
|
138
|
+
// ── 7. Start full stack ───────────────────────────────────────────────────────
|
|
139
|
+
console.log("");
|
|
140
|
+
divider();
|
|
141
|
+
info("All checks passed — starting full CrewSwarm stack");
|
|
142
|
+
divider();
|
|
143
|
+
console.log("");
|
|
144
|
+
|
|
145
|
+
// Step 1: Spawn start-crew.mjs in the background (RT bus + crew-lead + bridges)
|
|
146
|
+
info("Launching RT bus, crew-lead, and gateway bridges…");
|
|
147
|
+
const crewScript = path.join(ROOT, "scripts", "start-crew.mjs");
|
|
148
|
+
const crewProc = spawn(process.execPath, [crewScript], {
|
|
149
|
+
cwd: ROOT,
|
|
150
|
+
stdio: "inherit",
|
|
151
|
+
detached: false,
|
|
152
|
+
env: process.env,
|
|
153
|
+
});
|
|
154
|
+
|
|
155
|
+
// Wait for start-crew.mjs to finish its synchronous setup (it exits after spawning daemons)
|
|
156
|
+
await new Promise((resolve) => {
|
|
157
|
+
crewProc.on("exit", resolve);
|
|
158
|
+
crewProc.on("error", (err) => {
|
|
159
|
+
warn(`start-crew.mjs exited with error: ${err.message}`);
|
|
160
|
+
resolve();
|
|
161
|
+
});
|
|
162
|
+
});
|
|
163
|
+
|
|
164
|
+
// Step 2: Give the background daemons a moment to bind their ports
|
|
165
|
+
info("Waiting 3 s for services to come up…");
|
|
166
|
+
await new Promise((resolve) => setTimeout(resolve, 3000));
|
|
167
|
+
|
|
168
|
+
// Step 3: Start dashboard in the foreground (what the user sees)
|
|
134
169
|
console.log("");
|
|
135
170
|
divider();
|
|
136
|
-
|
|
171
|
+
console.log(bold(" All services started. Dashboard → http://127.0.0.1:4319"));
|
|
137
172
|
divider();
|
|
138
173
|
console.log("");
|
|
139
174
|
|
|
140
|
-
const result = spawnSync(
|
|
175
|
+
const result = spawnSync(process.execPath, [path.join(ROOT, "scripts", "dashboard.mjs")], {
|
|
141
176
|
cwd: ROOT,
|
|
142
177
|
stdio: "inherit",
|
|
143
178
|
env: process.env,
|