mcp-coordinator 0.2.1 → 0.4.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +846 -846
- package/dashboard/Dockerfile +19 -19
- package/dashboard/public/index.html +1178 -1178
- package/dist/cli/dashboard.js +9 -5
- package/dist/cli/server/backup.d.ts +7 -0
- package/dist/cli/server/backup.js +162 -0
- package/dist/cli/server/index.js +5 -0
- package/dist/cli/server/restore.d.ts +2 -0
- package/dist/cli/server/restore.js +117 -0
- package/dist/cli/server/start.js +24 -1
- package/dist/cli/server/status.js +16 -23
- package/dist/src/agent-activity.js +6 -6
- package/dist/src/agent-registry.js +6 -6
- package/dist/src/announce-workflow.d.ts +52 -0
- package/dist/src/announce-workflow.js +91 -0
- package/dist/src/consultation.d.ts +22 -0
- package/dist/src/consultation.js +118 -45
- package/dist/src/database.js +126 -126
- package/dist/src/db-adapter.d.ts +30 -0
- package/dist/src/db-adapter.js +32 -1
- package/dist/src/dependency-map.js +5 -5
- package/dist/src/file-tracker.d.ts +10 -0
- package/dist/src/file-tracker.js +40 -8
- package/dist/src/http/handle-health.d.ts +23 -0
- package/dist/src/http/handle-health.js +86 -0
- package/dist/src/http/handle-rest.d.ts +23 -0
- package/dist/src/http/handle-rest.js +374 -0
- package/dist/src/http/utils.d.ts +15 -0
- package/dist/src/http/utils.js +39 -0
- package/dist/src/impact-scorer.js +87 -50
- package/dist/src/introspection.js +1 -1
- package/dist/src/metrics.d.ts +83 -0
- package/dist/src/metrics.js +162 -0
- package/dist/src/mqtt-bridge.d.ts +21 -0
- package/dist/src/mqtt-bridge.js +55 -5
- package/dist/src/mqtt-broker.d.ts +16 -0
- package/dist/src/mqtt-broker.js +16 -1
- package/dist/src/path-guard.d.ts +14 -0
- package/dist/src/path-guard.js +44 -0
- package/dist/src/reset-guard.d.ts +16 -0
- package/dist/src/reset-guard.js +24 -0
- package/dist/src/serve-http.d.ts +31 -1
- package/dist/src/serve-http.js +189 -446
- package/dist/src/server-setup.d.ts +2 -0
- package/dist/src/server-setup.js +25 -366
- package/dist/src/sse-emitter.d.ts +6 -0
- package/dist/src/sse-emitter.js +50 -2
- package/dist/src/tools/agents-tools.d.ts +8 -0
- package/dist/src/tools/agents-tools.js +46 -0
- package/dist/src/tools/consultation-tools.d.ts +21 -0
- package/dist/src/tools/consultation-tools.js +170 -0
- package/dist/src/tools/dependencies-tools.d.ts +8 -0
- package/dist/src/tools/dependencies-tools.js +27 -0
- package/dist/src/tools/files-tools.d.ts +8 -0
- package/dist/src/tools/files-tools.js +28 -0
- package/dist/src/tools/mqtt-tools.d.ts +9 -0
- package/dist/src/tools/mqtt-tools.js +33 -0
- package/dist/src/tools/status-tools.d.ts +8 -0
- package/dist/src/tools/status-tools.js +63 -0
- package/package.json +83 -80
|
@@ -11,6 +11,7 @@ import { SseEmitter } from "./sse-emitter.js";
|
|
|
11
11
|
import { MqttBridge } from "./mqtt-bridge.js";
|
|
12
12
|
import { AgentActivityTracker } from "./agent-activity.js";
|
|
13
13
|
import { QuotaCache } from "./quota/quota-cache.js";
|
|
14
|
+
import { Metrics } from "./metrics.js";
|
|
14
15
|
import type { CoordinatorConfig } from "./types.js";
|
|
15
16
|
import { type Logger } from "./logger.js";
|
|
16
17
|
export interface CoordinatorServices {
|
|
@@ -27,6 +28,7 @@ export interface CoordinatorServices {
|
|
|
27
28
|
sseEmitter: SseEmitter;
|
|
28
29
|
mqttBridge: MqttBridge;
|
|
29
30
|
quotaCache: QuotaCache;
|
|
31
|
+
metrics: Metrics;
|
|
30
32
|
}
|
|
31
33
|
/** Create shared services (once at startup). */
|
|
32
34
|
export declare function createServices(config: CoordinatorConfig): CoordinatorServices;
|
package/dist/src/server-setup.js
CHANGED
|
@@ -1,6 +1,11 @@
|
|
|
1
1
|
import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js";
|
|
2
|
-
import { z } from "zod";
|
|
3
2
|
import { initDatabase } from "./database.js";
|
|
3
|
+
import { registerConsultationTools } from "./tools/consultation-tools.js";
|
|
4
|
+
import { registerAgentTools } from "./tools/agents-tools.js";
|
|
5
|
+
import { registerFilesTools } from "./tools/files-tools.js";
|
|
6
|
+
import { registerDependenciesTools } from "./tools/dependencies-tools.js";
|
|
7
|
+
import { registerStatusTools } from "./tools/status-tools.js";
|
|
8
|
+
import { registerMqttTools } from "./tools/mqtt-tools.js";
|
|
4
9
|
import { AgentRegistry } from "./agent-registry.js";
|
|
5
10
|
import { Consultation } from "./consultation.js";
|
|
6
11
|
import { ConflictDetector } from "./conflict-detector.js";
|
|
@@ -11,9 +16,9 @@ import { SummaryContextProvider } from "./context-provider.js";
|
|
|
11
16
|
import { IntrospectionManager } from "./introspection.js";
|
|
12
17
|
import { SseEmitter } from "./sse-emitter.js";
|
|
13
18
|
import { MqttBridge } from "./mqtt-bridge.js";
|
|
14
|
-
import { assessPlanQuality } from "./plan-quality.js";
|
|
15
19
|
import { AgentActivityTracker } from "./agent-activity.js";
|
|
16
20
|
import { QuotaCache } from "./quota/quota-cache.js";
|
|
21
|
+
import { Metrics } from "./metrics.js";
|
|
17
22
|
import { createLogger } from "./logger.js";
|
|
18
23
|
import { getVersion } from "../cli/version.js";
|
|
19
24
|
const VERSION = getVersion();
|
|
@@ -32,6 +37,7 @@ export function createServices(config) {
|
|
|
32
37
|
const contextProvider = new SummaryContextProvider(registry, consultation, fileTracker);
|
|
33
38
|
const sseEmitter = new SseEmitter();
|
|
34
39
|
const mqttBridge = new MqttBridge(logger.child({ component: "mqtt" }));
|
|
40
|
+
const metrics = new Metrics();
|
|
35
41
|
// Quota cache — macOS-only for now, Linux/Windows stubs return 503 via the
|
|
36
42
|
// /api/quota handler so raids keep running without a quota guardrail there.
|
|
37
43
|
// onRefresh fans the new data out to dashboard (SSE) + any live listener (MQTT)
|
|
@@ -58,8 +64,9 @@ export function createServices(config) {
|
|
|
58
64
|
else if (event.type === "agent_offline")
|
|
59
65
|
quotaCache.onAgentInactive();
|
|
60
66
|
});
|
|
61
|
-
// Centralized resolution → SSE + MQTT
|
|
67
|
+
// Centralized resolution → SSE + MQTT + metrics
|
|
62
68
|
consultation.onResolve((event) => {
|
|
69
|
+
metrics.recordThreadResolved(event.resolution_type);
|
|
63
70
|
sseEmitter.emit("thread_resolved", {
|
|
64
71
|
thread_id: event.thread_id,
|
|
65
72
|
resolution_type: event.resolution_type,
|
|
@@ -73,10 +80,15 @@ export function createServices(config) {
|
|
|
73
80
|
if (event.resolution_type !== "auto_resolved") {
|
|
74
81
|
mqttBridge.publishResolution(event.thread_id, "resolved", event.resolution_summary || "");
|
|
75
82
|
}
|
|
83
|
+
// P1 fix: clear the retained `coordinator/consultations/new` event so a
|
|
84
|
+
// coordinator restart doesn't re-broadcast a consultation that's already
|
|
85
|
+
// been resolved. No-op when the retained slot holds a different (newer)
|
|
86
|
+
// thread.
|
|
87
|
+
mqttBridge.clearRetainedConsultation(event.thread_id);
|
|
76
88
|
});
|
|
77
89
|
return {
|
|
78
90
|
logger, registry, activityTracker, consultation, conflictDetector,
|
|
79
|
-
depMap, fileTracker, impactScorer, introspection, contextProvider, sseEmitter, mqttBridge, quotaCache,
|
|
91
|
+
depMap, fileTracker, impactScorer, introspection, contextProvider, sseEmitter, mqttBridge, quotaCache, metrics,
|
|
80
92
|
};
|
|
81
93
|
}
|
|
82
94
|
/** Create a new McpServer bound to the shared services (one per MCP session). */
|
|
@@ -87,367 +99,14 @@ export function createMcpServer(services) {
|
|
|
87
99
|
name: "mcp-coordinator-v3",
|
|
88
100
|
version: VERSION,
|
|
89
101
|
});
|
|
90
|
-
//
|
|
91
|
-
|
|
92
|
-
|
|
93
|
-
|
|
94
|
-
|
|
95
|
-
|
|
96
|
-
|
|
97
|
-
|
|
98
|
-
|
|
99
|
-
mqttBridge.registerAgent(agent_id, name);
|
|
100
|
-
return { content: [{ type: "text", text: JSON.stringify(agent) }] };
|
|
101
|
-
});
|
|
102
|
-
server.tool("list_agents", "List registered agents", {
|
|
103
|
-
online_only: z.boolean().optional(),
|
|
104
|
-
}, async ({ online_only }) => {
|
|
105
|
-
const agents = online_only ? registry.listOnline() : registry.listAll();
|
|
106
|
-
return { content: [{ type: "text", text: JSON.stringify(agents) }] };
|
|
107
|
-
});
|
|
108
|
-
server.tool("heartbeat", "Update agent activity status and last seen timestamp", {
|
|
109
|
-
agent_id: z.string(),
|
|
110
|
-
current_file: z.string().optional(),
|
|
111
|
-
current_thread: z.string().optional(),
|
|
112
|
-
}, async ({ agent_id, current_file, current_thread }) => {
|
|
113
|
-
registry.heartbeat(agent_id);
|
|
114
|
-
activityTracker.heartbeat(agent_id, {
|
|
115
|
-
currentFile: current_file || null,
|
|
116
|
-
currentThread: current_thread || null,
|
|
117
|
-
});
|
|
118
|
-
const activity = activityTracker.getActivity(agent_id);
|
|
119
|
-
sseEmitter.emit("agent_activity", {
|
|
120
|
-
agent_id, activity_status: activity.activity_status,
|
|
121
|
-
current_file: activity.current_file, current_thread: activity.current_thread,
|
|
122
|
-
});
|
|
123
|
-
return { content: [{ type: "text", text: JSON.stringify(activity) }] };
|
|
124
|
-
});
|
|
125
|
-
server.tool("agent_activity", "Get activity status for all online agents", {}, async () => {
|
|
126
|
-
const activities = activityTracker.listAll({ idleAfterMinutes: 5 });
|
|
127
|
-
return { content: [{ type: "text", text: JSON.stringify(activities) }] };
|
|
128
|
-
});
|
|
129
|
-
// ── CONSULTATION TOOLS ──
|
|
130
|
-
server.tool("announce_work", "Open a consultation thread before starting work", {
|
|
131
|
-
agent_id: z.string(),
|
|
132
|
-
subject: z.string(),
|
|
133
|
-
plan: z.string().optional(),
|
|
134
|
-
target_modules: z.array(z.string()),
|
|
135
|
-
target_files: z.array(z.string()),
|
|
136
|
-
depends_on_files: z.array(z.string()).optional(),
|
|
137
|
-
exports_affected: z.array(z.string()).optional(),
|
|
138
|
-
keep_open: z.boolean().optional().describe("Keep thread open even if no agents are concerned (for manual coordination like games or debates)"),
|
|
139
|
-
assigned_to: z.string().optional().describe("Directed-dispatch: only this agent_id will be allowed to claim the thread. Use for lead→worker handoffs in maitre/chaine/relais presets. Implies keep_open=true."),
|
|
140
|
-
}, async ({ agent_id, subject, plan, target_modules, target_files, depends_on_files, exports_affected, keep_open, assigned_to }) => {
|
|
141
|
-
mcpLog.info({ tool: "announce_work", agent_id, subject, target_modules, target_files, assigned_to }, "Tool called");
|
|
142
|
-
// Quality gate on plan
|
|
143
|
-
const planQuality = assessPlanQuality(plan);
|
|
144
|
-
const effectiveMode = planQuality.mode;
|
|
145
|
-
const conflicts = conflictDetector.detect({ agent_id, target_modules, target_files });
|
|
146
|
-
const thread = consultation.announceWork({
|
|
147
|
-
agent_id, subject, plan, target_modules, target_files, depends_on_files, exports_affected, keep_open, assigned_to,
|
|
148
|
-
});
|
|
149
|
-
// Store conflicts on thread
|
|
150
|
-
if (conflicts.length > 0) {
|
|
151
|
-
const db = (await import("./database.js")).getDb();
|
|
152
|
-
db.prepare("UPDATE threads SET conflicts = ? WHERE id = ?")
|
|
153
|
-
.run(JSON.stringify(conflicts), thread.id);
|
|
154
|
-
}
|
|
155
|
-
// Impact scoring: categorize all online agents
|
|
156
|
-
const categorized = impactScorer.categorize({
|
|
157
|
-
agent_id, target_modules, target_files, depends_on_files, exports_affected,
|
|
158
|
-
});
|
|
159
|
-
// Override expected_respondents with concerned agents from scorer
|
|
160
|
-
{
|
|
161
|
-
const db = (await import("./database.js")).getDb();
|
|
162
|
-
const concernedIds = categorized.concerned.map(s => s.agent_id);
|
|
163
|
-
db.prepare("UPDATE threads SET expected_respondents = ? WHERE id = ?")
|
|
164
|
-
.run(JSON.stringify(concernedIds), thread.id);
|
|
165
|
-
// Only auto-resolve when truly alone — no other online agents.
|
|
166
|
-
// If peers are online but not yet concerned, keep the thread open so
|
|
167
|
-
// a subsequent announce can still match via Layer 0. Thread timeouts
|
|
168
|
-
// naturally if no one joins.
|
|
169
|
-
const otherOnlineCount = registry.listOnline().filter((a) => a.id !== agent_id).length;
|
|
170
|
-
const shouldAutoResolve = concernedIds.length === 0 && otherOnlineCount === 0;
|
|
171
|
-
if (shouldAutoResolve && thread.status === "open" && !keep_open) {
|
|
172
|
-
db.prepare("UPDATE threads SET status = 'resolved', resolved_at = ? WHERE id = ?")
|
|
173
|
-
.run(new Date().toISOString(), thread.id);
|
|
174
|
-
consultation.emitResolution(thread.id, "auto_resolved");
|
|
175
|
-
}
|
|
176
|
-
}
|
|
177
|
-
// Emit impact_scored SSE events for all agents
|
|
178
|
-
for (const s of [...categorized.concerned, ...categorized.gray_zone, ...categorized.pass]) {
|
|
179
|
-
sseEmitter.emit("impact_scored", {
|
|
180
|
-
thread_id: thread.id, agent_id: s.agent_id, agent_name: s.agent_name,
|
|
181
|
-
score: s.score, reasons: s.reasons, category: s.score >= 90 ? "concerned" : s.score >= 30 ? "gray_zone" : "pass",
|
|
182
|
-
});
|
|
183
|
-
}
|
|
184
|
-
// Create introspection records and emit introspection_requested for gray_zone agents
|
|
185
|
-
for (const s of categorized.gray_zone) {
|
|
186
|
-
introspection.create({ thread_id: thread.id, agent_id: s.agent_id, score: s.score, reasons: s.reasons });
|
|
187
|
-
sseEmitter.emit("introspection_requested", {
|
|
188
|
-
thread_id: thread.id, agent_id: s.agent_id, agent_name: s.agent_name, score: s.score, reasons: s.reasons,
|
|
189
|
-
});
|
|
190
|
-
}
|
|
191
|
-
// Emit downgrade event when plan is provided but quality is insufficient
|
|
192
|
-
if (plan && effectiveMode === "discovery") {
|
|
193
|
-
sseEmitter.emit("impact_scored", {
|
|
194
|
-
thread_id: thread.id,
|
|
195
|
-
agent_id: agent_id,
|
|
196
|
-
agent_name: registry.get(agent_id)?.name || agent_id,
|
|
197
|
-
score: planQuality.score,
|
|
198
|
-
reasons: [`plan downgraded: score ${planQuality.score}/3 — ${!planQuality.checks.mentions_files ? 'no files' : ''} ${!planQuality.checks.concrete_approach ? 'vague approach' : ''} ${!planQuality.checks.sufficient_detail ? 'too short' : ''}`.trim()],
|
|
199
|
-
category: "plan_quality",
|
|
200
|
-
});
|
|
201
|
-
}
|
|
202
|
-
const updated = consultation.getThread(thread.id);
|
|
203
|
-
const respondents = JSON.parse(updated.expected_respondents || "[]");
|
|
204
|
-
sseEmitter.emit("thread_opened", {
|
|
205
|
-
thread_id: thread.id, initiator: agent_id, subject, target_modules, conflicts,
|
|
206
|
-
expected_respondents: respondents,
|
|
207
|
-
mode: effectiveMode,
|
|
208
|
-
plan: plan || null,
|
|
209
|
-
plan_quality: planQuality,
|
|
210
|
-
});
|
|
211
|
-
mqttBridge.publishConsultation(thread.id, agent_id, subject, target_modules);
|
|
212
|
-
// Gather context from concerned agents for the initiator
|
|
213
|
-
const contextForInitiator = respondents.map((rid) => contextProvider.getRelevantContext(rid, { thread_id: updated.id, subject, target_modules, target_files })).filter((ctx) => ctx.modules.length > 0);
|
|
214
|
-
return {
|
|
215
|
-
content: [{
|
|
216
|
-
type: "text",
|
|
217
|
-
text: JSON.stringify({ thread: updated, conflicts, context: contextForInitiator, impact: categorized }),
|
|
218
|
-
}],
|
|
219
|
-
};
|
|
220
|
-
});
|
|
221
|
-
server.tool("post_to_thread", "Post a message to a consultation thread", {
|
|
222
|
-
thread_id: z.string(),
|
|
223
|
-
agent_id: z.string(),
|
|
224
|
-
agent_name: z.string().optional(),
|
|
225
|
-
type: z.enum(["context", "suggestion", "warning"]),
|
|
226
|
-
content: z.string(),
|
|
227
|
-
context_snapshot: z.string().optional(),
|
|
228
|
-
in_reply_to: z.string().optional(),
|
|
229
|
-
}, async ({ thread_id, agent_id, agent_name, type, content, context_snapshot, in_reply_to }) => {
|
|
230
|
-
mcpLog.info({ tool: "post_to_thread", thread_id, agent_id, type }, "Tool called");
|
|
231
|
-
const msg = consultation.postToThread({
|
|
232
|
-
thread_id, agent_id, agent_name, type, content, context_snapshot, in_reply_to,
|
|
233
|
-
});
|
|
234
|
-
const thread = consultation.getThread(thread_id);
|
|
235
|
-
sseEmitter.emit("message_posted", {
|
|
236
|
-
thread_id, agent_id, agent_name: agent_name || agent_id,
|
|
237
|
-
type, content, round: thread?.round || 1,
|
|
238
|
-
token_estimate: msg.token_estimate || 0,
|
|
239
|
-
});
|
|
240
|
-
mqttBridge.publishMessage(thread_id, agent_id, type, content);
|
|
241
|
-
return { content: [{ type: "text", text: JSON.stringify(msg) }] };
|
|
242
|
-
});
|
|
243
|
-
server.tool("propose_resolution", "Propose a resolution for the consultation", {
|
|
244
|
-
thread_id: z.string(),
|
|
245
|
-
agent_id: z.string(),
|
|
246
|
-
summary: z.string(),
|
|
247
|
-
plan: z.string().optional(),
|
|
248
|
-
}, async ({ thread_id, agent_id, summary, plan }) => {
|
|
249
|
-
mcpLog.info({ tool: "propose_resolution", thread_id, agent_id }, "Tool called");
|
|
250
|
-
consultation.proposeResolution(thread_id, agent_id, summary);
|
|
251
|
-
sseEmitter.emit("resolution_proposed", { thread_id, agent_id, summary });
|
|
252
|
-
mqttBridge.publishResolution(thread_id, "resolving", summary);
|
|
253
|
-
const thread = consultation.getThread(thread_id);
|
|
254
|
-
return { content: [{ type: "text", text: JSON.stringify(thread) }] };
|
|
255
|
-
});
|
|
256
|
-
server.tool("approve_resolution", "Approve the proposed resolution", {
|
|
257
|
-
thread_id: z.string(),
|
|
258
|
-
agent_id: z.string(),
|
|
259
|
-
}, async ({ thread_id, agent_id }) => {
|
|
260
|
-
mcpLog.info({ tool: "approve_resolution", thread_id, agent_id }, "Tool called");
|
|
261
|
-
const agentInfo = registry.get(agent_id);
|
|
262
|
-
consultation.approveResolution(thread_id, agent_id, agentInfo?.name);
|
|
263
|
-
const thread = consultation.getThread(thread_id);
|
|
264
|
-
return { content: [{ type: "text", text: JSON.stringify(thread) }] };
|
|
265
|
-
});
|
|
266
|
-
server.tool("contest_resolution", "Contest the proposed resolution", {
|
|
267
|
-
thread_id: z.string(),
|
|
268
|
-
agent_id: z.string(),
|
|
269
|
-
reason: z.string(),
|
|
270
|
-
}, async ({ thread_id, agent_id, reason }) => {
|
|
271
|
-
mcpLog.info({ tool: "contest_resolution", thread_id, agent_id }, "Tool called");
|
|
272
|
-
consultation.contestResolution(thread_id, agent_id, reason);
|
|
273
|
-
const thread = consultation.getThread(thread_id);
|
|
274
|
-
return { content: [{ type: "text", text: JSON.stringify(thread) }] };
|
|
275
|
-
});
|
|
276
|
-
server.tool("close_thread", "Close a consultation thread", {
|
|
277
|
-
thread_id: z.string(),
|
|
278
|
-
agent_id: z.string(),
|
|
279
|
-
summary: z.string(),
|
|
280
|
-
}, async ({ thread_id, agent_id, summary }) => {
|
|
281
|
-
mcpLog.info({ tool: "close_thread", thread_id, agent_id }, "Tool called");
|
|
282
|
-
consultation.closeThread(thread_id, agent_id, summary);
|
|
283
|
-
return { content: [{ type: "text", text: "closed" }] };
|
|
284
|
-
});
|
|
285
|
-
server.tool("cancel_thread", "Cancel a consultation thread", {
|
|
286
|
-
thread_id: z.string(),
|
|
287
|
-
agent_id: z.string(),
|
|
288
|
-
reason: z.string().optional(),
|
|
289
|
-
}, async ({ thread_id, agent_id, reason }) => {
|
|
290
|
-
mcpLog.info({ tool: "cancel_thread", thread_id, agent_id }, "Tool called");
|
|
291
|
-
consultation.cancelThread(thread_id, agent_id, reason);
|
|
292
|
-
sseEmitter.emit("thread_cancelled", { thread_id, reason });
|
|
293
|
-
return { content: [{ type: "text", text: "cancelled" }] };
|
|
294
|
-
});
|
|
295
|
-
server.tool("get_thread", "Get a thread with all messages", {
|
|
296
|
-
thread_id: z.string(),
|
|
297
|
-
}, async ({ thread_id }) => {
|
|
298
|
-
const result = consultation.getThreadWithMessages(thread_id);
|
|
299
|
-
mcpLog.debug({ tool: "get_thread", thread_id, message_count: result?.messages.length }, "Tool called");
|
|
300
|
-
return { content: [{ type: "text", text: JSON.stringify(result) }] };
|
|
301
|
-
});
|
|
302
|
-
server.tool("get_thread_updates", "Get new messages since timestamp", {
|
|
303
|
-
agent_id: z.string(),
|
|
304
|
-
since: z.string().optional(),
|
|
305
|
-
}, async ({ agent_id, since }) => {
|
|
306
|
-
const updates = consultation.getThreadUpdates(agent_id, since);
|
|
307
|
-
return { content: [{ type: "text", text: JSON.stringify(updates) }] };
|
|
308
|
-
});
|
|
309
|
-
server.tool("list_threads", "List consultation threads", {
|
|
310
|
-
status: z.string().optional(),
|
|
311
|
-
agent_id: z.string().optional(),
|
|
312
|
-
module: z.string().optional(),
|
|
313
|
-
assigned_to_me: z.string().optional().describe("Filter to threads claimable by this agent_id: open pool (assigned_to NULL) OR directed to me. Use for worker agents receiving directed dispatches."),
|
|
314
|
-
}, async ({ status, agent_id, module, assigned_to_me }) => {
|
|
315
|
-
const threads = consultation.listThreads({ status, agent_id, module, assigned_to_me });
|
|
316
|
-
mcpLog.debug({ tool: "list_threads", status, agent_id, module, assigned_to_me, result_count: threads.length }, "Tool called");
|
|
317
|
-
return { content: [{ type: "text", text: JSON.stringify(threads) }] };
|
|
318
|
-
});
|
|
319
|
-
server.tool("log_action_summary", "Log a one-liner summary of an action", {
|
|
320
|
-
session_id: z.string(),
|
|
321
|
-
agent_id: z.string(),
|
|
322
|
-
file_path: z.string().optional(),
|
|
323
|
-
summary: z.string(),
|
|
324
|
-
}, async ({ session_id, agent_id, file_path, summary }) => {
|
|
325
|
-
const result = consultation.logActionSummary({ session_id, agent_id, file_path, summary });
|
|
326
|
-
sseEmitter.emit("action_summary", { agent_id, file_path, summary });
|
|
327
|
-
return { content: [{ type: "text", text: JSON.stringify(result) }] };
|
|
328
|
-
});
|
|
329
|
-
// ── FILE TRACKING TOOLS ──
|
|
330
|
-
server.tool("hot_files", "List files modified by multiple agents", {
|
|
331
|
-
since_minutes: z.number().optional(),
|
|
332
|
-
}, async ({ since_minutes }) => {
|
|
333
|
-
const files = fileTracker.getHotFiles(since_minutes || 30);
|
|
334
|
-
return { content: [{ type: "text", text: JSON.stringify(files) }] };
|
|
335
|
-
});
|
|
336
|
-
server.tool("get_session_files", "Get files modified in a session", {
|
|
337
|
-
session_id: z.string(),
|
|
338
|
-
}, async ({ session_id }) => {
|
|
339
|
-
const files = fileTracker.getBySession(session_id);
|
|
340
|
-
return { content: [{ type: "text", text: JSON.stringify(files) }] };
|
|
341
|
-
});
|
|
342
|
-
server.tool("check_file_conflict", "Check if another agent is editing a file", {
|
|
343
|
-
file_path: z.string(),
|
|
344
|
-
agent_id: z.string(),
|
|
345
|
-
within_minutes: z.number().optional(),
|
|
346
|
-
}, async ({ file_path, agent_id, within_minutes }) => {
|
|
347
|
-
const result = fileTracker.checkFileConflict(file_path, agent_id, within_minutes || 30);
|
|
348
|
-
return { content: [{ type: "text", text: JSON.stringify(result) }] };
|
|
349
|
-
});
|
|
350
|
-
// ── DEPENDENCY MAP TOOLS ──
|
|
351
|
-
server.tool("set_dependency_map", "Load module dependency graph", {
|
|
352
|
-
modules: z.string(), // JSON DependencyMap
|
|
353
|
-
}, async ({ modules }) => {
|
|
354
|
-
const map = JSON.parse(modules);
|
|
355
|
-
depMap.setMap(map);
|
|
356
|
-
return { content: [{ type: "text", text: "ok" }] };
|
|
357
|
-
});
|
|
358
|
-
server.tool("get_blast_radius", "Calculate impact of changes to a module", {
|
|
359
|
-
module_id: z.string(),
|
|
360
|
-
}, async ({ module_id }) => {
|
|
361
|
-
const radius = depMap.getBlastRadius(module_id);
|
|
362
|
-
return { content: [{ type: "text", text: JSON.stringify(radius) }] };
|
|
363
|
-
});
|
|
364
|
-
server.tool("get_module_info", "Get module dependency info", {
|
|
365
|
-
module_id: z.string(),
|
|
366
|
-
}, async ({ module_id }) => {
|
|
367
|
-
const info = depMap.getModuleInfo(module_id);
|
|
368
|
-
return { content: [{ type: "text", text: JSON.stringify(info) }] };
|
|
369
|
-
});
|
|
370
|
-
// ── STATUS TOOL ──
|
|
371
|
-
server.tool("coordinator_status", "Full system status", {}, async () => {
|
|
372
|
-
const online = registry.listOnline();
|
|
373
|
-
const openThreads = consultation.listThreads({ status: "open" });
|
|
374
|
-
const resolvingThreads = consultation.listThreads({ status: "resolving" });
|
|
375
|
-
const hotFiles = fileTracker.getHotFiles(30);
|
|
376
|
-
const status = {
|
|
377
|
-
agents_online: online.length,
|
|
378
|
-
agents: online.map((a) => ({ id: a.id, name: a.name, modules: JSON.parse(a.modules) })),
|
|
379
|
-
open_threads: openThreads.length,
|
|
380
|
-
resolving_threads: resolvingThreads.length,
|
|
381
|
-
hot_files: hotFiles.length,
|
|
382
|
-
mqtt_connected: mqttBridge.isConnected(),
|
|
383
|
-
};
|
|
384
|
-
mcpLog.debug({ tool: "coordinator_status", agents_online: online.length, open_threads: openThreads.length }, "Tool called");
|
|
385
|
-
return { content: [{ type: "text", text: JSON.stringify(status) }] };
|
|
386
|
-
});
|
|
387
|
-
// ── COORDINATION HELPERS ──
|
|
388
|
-
server.tool("wait_for_peers", "Block until at least N other online agents are registered, or timeout. Use before the first announce_work to avoid the race where one agent announces before peers have booted.", {
|
|
389
|
-
agent_id: z.string(),
|
|
390
|
-
min_peers: z.number().optional(),
|
|
391
|
-
timeout_seconds: z.number().optional(),
|
|
392
|
-
}, async ({ agent_id, min_peers, timeout_seconds }) => {
|
|
393
|
-
const targetPeers = min_peers ?? 1;
|
|
394
|
-
const timeoutMs = (timeout_seconds ?? 30) * 1000;
|
|
395
|
-
const pollIntervalMs = 1000;
|
|
396
|
-
const startedAt = Date.now();
|
|
397
|
-
mcpLog.info({ tool: "wait_for_peers", agent_id, min_peers: targetPeers, timeout_seconds: timeoutMs / 1000 }, "Tool called");
|
|
398
|
-
while (Date.now() - startedAt < timeoutMs) {
|
|
399
|
-
const peers = registry.listOnline().filter((a) => a.id !== agent_id);
|
|
400
|
-
if (peers.length >= targetPeers) {
|
|
401
|
-
return {
|
|
402
|
-
content: [{
|
|
403
|
-
type: "text",
|
|
404
|
-
text: JSON.stringify({
|
|
405
|
-
ready: true,
|
|
406
|
-
online_peers: peers.map((p) => ({ id: p.id, name: p.name })),
|
|
407
|
-
waited_ms: Date.now() - startedAt,
|
|
408
|
-
}),
|
|
409
|
-
}],
|
|
410
|
-
};
|
|
411
|
-
}
|
|
412
|
-
await new Promise((r) => setTimeout(r, pollIntervalMs));
|
|
413
|
-
}
|
|
414
|
-
const finalPeers = registry.listOnline().filter((a) => a.id !== agent_id);
|
|
415
|
-
return {
|
|
416
|
-
content: [{
|
|
417
|
-
type: "text",
|
|
418
|
-
text: JSON.stringify({
|
|
419
|
-
ready: false,
|
|
420
|
-
timeout: true,
|
|
421
|
-
online_peers: finalPeers.map((p) => ({ id: p.id, name: p.name })),
|
|
422
|
-
waited_ms: Date.now() - startedAt,
|
|
423
|
-
}),
|
|
424
|
-
}],
|
|
425
|
-
};
|
|
426
|
-
});
|
|
427
|
-
// ── MQTT LISTENER TOOLS (replaces standalone mqtt-mcp-bridge) ──
|
|
428
|
-
server.tool("wait_for_message", "Block until an MQTT consultation message arrives or timeout", {
|
|
429
|
-
agent_id: z.string(),
|
|
430
|
-
timeout_seconds: z.number().optional(),
|
|
431
|
-
}, async ({ agent_id, timeout_seconds }) => {
|
|
432
|
-
const timeoutMs = (timeout_seconds || 15) * 1000;
|
|
433
|
-
const msg = await mqttBridge.waitForMessage(agent_id, timeoutMs);
|
|
434
|
-
if (msg) {
|
|
435
|
-
return { content: [{ type: "text", text: JSON.stringify(msg) }] };
|
|
436
|
-
}
|
|
437
|
-
return { content: [{ type: "text", text: JSON.stringify({ timeout: true }) }] };
|
|
438
|
-
});
|
|
439
|
-
server.tool("get_queued_messages", "Get all queued MQTT messages without blocking", {
|
|
440
|
-
agent_id: z.string(),
|
|
441
|
-
}, async ({ agent_id }) => {
|
|
442
|
-
const messages = mqttBridge.getQueuedMessages(agent_id);
|
|
443
|
-
return { content: [{ type: "text", text: JSON.stringify(messages) }] };
|
|
444
|
-
});
|
|
445
|
-
server.tool("mqtt_publish", "Publish a message to an MQTT topic", {
|
|
446
|
-
topic: z.string(),
|
|
447
|
-
payload: z.string(),
|
|
448
|
-
}, async ({ topic, payload }) => {
|
|
449
|
-
mqttBridge.mqttPublish(topic, payload);
|
|
450
|
-
return { content: [{ type: "text", text: "published" }] };
|
|
451
|
-
});
|
|
102
|
+
// S1: all 23 MCP tools registered via per-domain modules under src/tools/.
|
|
103
|
+
// Each register*Tools function takes (server, services, mcpLog) and wires
|
|
104
|
+
// its tool group; nothing else lives here. See src/tools/*.ts for behavior.
|
|
105
|
+
registerAgentTools(server, services, mcpLog);
|
|
106
|
+
registerConsultationTools(server, services, mcpLog);
|
|
107
|
+
registerFilesTools(server, services, mcpLog);
|
|
108
|
+
registerDependenciesTools(server, services, mcpLog);
|
|
109
|
+
registerStatusTools(server, services, mcpLog);
|
|
110
|
+
registerMqttTools(server, services, mcpLog);
|
|
452
111
|
return server;
|
|
453
112
|
}
|
|
@@ -1,10 +1,16 @@
|
|
|
1
1
|
import type { CoordinatorEvent, EventType } from "./types.js";
|
|
2
2
|
type EventListener = (event: CoordinatorEvent) => void;
|
|
3
|
+
export declare const MAX_SSE_CLIENTS: number;
|
|
3
4
|
export declare class SseEmitter {
|
|
4
5
|
private listeners;
|
|
6
|
+
private rejectedCount;
|
|
5
7
|
emit(type: EventType, payload: Record<string, unknown>): void;
|
|
6
8
|
getEventsSince(lastId: number): CoordinatorEvent[];
|
|
7
9
|
addListener(listener: EventListener): () => void;
|
|
8
10
|
removeAllListeners(): void;
|
|
11
|
+
/** P3: introspection for tests + ops dashboards. */
|
|
12
|
+
listenerCount(): number;
|
|
13
|
+
/** P3: count of addListener calls refused due to MAX_SSE_CLIENTS. */
|
|
14
|
+
getRejectedCount(): number;
|
|
9
15
|
}
|
|
10
16
|
export {};
|
package/dist/src/sse-emitter.js
CHANGED
|
@@ -1,6 +1,25 @@
|
|
|
1
1
|
import { getDb } from "./database.js";
|
|
2
|
+
/**
|
|
3
|
+
* P3: bound the listener array so a runaway client (or DoS attempt) can't
|
|
4
|
+
* grow it without limit. Default 100 covers a small-to-mid swarm — enough
|
|
5
|
+
* headroom for a dashboard + every agent + a handful of CLI tailers, but
|
|
6
|
+
* not so large that a leak would silently exhaust memory. Override via
|
|
7
|
+
* COORDINATOR_MAX_SSE_CLIENTS for larger deployments.
|
|
8
|
+
*/
|
|
9
|
+
const DEFAULT_MAX_SSE_CLIENTS = 100;
|
|
10
|
+
export const MAX_SSE_CLIENTS = (() => {
|
|
11
|
+
const raw = process.env.COORDINATOR_MAX_SSE_CLIENTS;
|
|
12
|
+
if (!raw)
|
|
13
|
+
return DEFAULT_MAX_SSE_CLIENTS;
|
|
14
|
+
const n = parseInt(raw, 10);
|
|
15
|
+
return Number.isFinite(n) && n > 0 ? n : DEFAULT_MAX_SSE_CLIENTS;
|
|
16
|
+
})();
|
|
17
|
+
const NOOP = () => { };
|
|
2
18
|
export class SseEmitter {
|
|
3
19
|
listeners = [];
|
|
20
|
+
// P3: track refusals so operators can see when the cap is being hit.
|
|
21
|
+
// Also lets tests assert "we refused without throwing" without scraping logs.
|
|
22
|
+
rejectedCount = 0;
|
|
4
23
|
emit(type, payload) {
|
|
5
24
|
const db = getDb();
|
|
6
25
|
const payloadStr = JSON.stringify(payload);
|
|
@@ -13,8 +32,22 @@ export class SseEmitter {
|
|
|
13
32
|
payload: payloadStr,
|
|
14
33
|
created_at: new Date().toISOString(),
|
|
15
34
|
};
|
|
16
|
-
|
|
17
|
-
|
|
35
|
+
// P3: async fan-out via setImmediate so a slow listener (e.g. a stalled
|
|
36
|
+
// SSE client whose socket buffer is full) cannot block siblings or the
|
|
37
|
+
// emit() caller. Snapshot the array first so a listener that unsubscribes
|
|
38
|
+
// mid-loop doesn't shift indices under us.
|
|
39
|
+
const snapshot = this.listeners.slice();
|
|
40
|
+
for (const listener of snapshot) {
|
|
41
|
+
setImmediate(() => {
|
|
42
|
+
try {
|
|
43
|
+
listener(event);
|
|
44
|
+
}
|
|
45
|
+
catch {
|
|
46
|
+
// Listener errors must not crash the emitter or affect siblings.
|
|
47
|
+
// Drop silently — the SSE response writers swallow their own
|
|
48
|
+
// socket errors via the unsubscribe path on req.on("close").
|
|
49
|
+
}
|
|
50
|
+
});
|
|
18
51
|
}
|
|
19
52
|
}
|
|
20
53
|
getEventsSince(lastId) {
|
|
@@ -24,6 +57,13 @@ export class SseEmitter {
|
|
|
24
57
|
.all(lastId);
|
|
25
58
|
}
|
|
26
59
|
addListener(listener) {
|
|
60
|
+
// P3: refuse-with-no-op when the cap is reached. Returning a no-op
|
|
61
|
+
// keeps the caller's unsubscribe contract intact (no special-casing
|
|
62
|
+
// upstream) while preventing the array from growing past MAX_SSE_CLIENTS.
|
|
63
|
+
if (this.listeners.length >= MAX_SSE_CLIENTS) {
|
|
64
|
+
this.rejectedCount++;
|
|
65
|
+
return NOOP;
|
|
66
|
+
}
|
|
27
67
|
this.listeners.push(listener);
|
|
28
68
|
return () => {
|
|
29
69
|
this.listeners = this.listeners.filter((l) => l !== listener);
|
|
@@ -32,4 +72,12 @@ export class SseEmitter {
|
|
|
32
72
|
/**
 * Drop every registered listener at once (e.g. on server shutdown or in
 * test teardown).
 *
 * Replaces the array rather than mutating it: emit() snapshots via slice(),
 * so an in-flight fan-out keeps its own copy and is unaffected.
 * NOTE(review): this does not reset rejectedCount — presumably intentional,
 * since that counter is a lifetime metric; confirm with the metrics consumer.
 */
removeAllListeners() {
this.listeners = [];
}
|
|
75
|
+
/**
 * P3: introspection for tests + ops dashboards.
 * @returns The number of currently subscribed SSE listeners (the same
 *   count addListener compares against MAX_SSE_CLIENTS).
 */
listenerCount() {
return this.listeners.length;
}
|
|
79
|
+
/**
 * P3: count of addListener calls refused due to MAX_SSE_CLIENTS.
 * Monotonically increasing; not reset by removeAllListeners().
 * @returns Total subscriptions rejected since this emitter was constructed.
 */
getRejectedCount() {
return this.rejectedCount;
}
|
|
35
83
|
}
|
|
@@ -0,0 +1,8 @@
|
|
|
1
|
+
import type { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js";
|
|
2
|
+
import type { CoordinatorServices } from "../server-setup.js";
|
|
3
|
+
import type { Logger } from "../logger.js";
|
|
4
|
+
/**
 * S1: agent registry MCP tools (4 tools).
 * register_agent, list_agents, heartbeat, agent_activity.
 *
 * @param server - MCP server instance the tools are registered on.
 * @param services - Coordinator service bundle; the implementation uses the
 *   agent registry, activity tracker, SSE emitter, and MQTT bridge from it.
 * @param mcpLog - Logger used to trace tool invocations.
 */
export declare function registerAgentTools(server: McpServer, services: CoordinatorServices, mcpLog: Logger): void;
|
|
@@ -0,0 +1,46 @@
|
|
|
1
|
+
import { z } from "zod";
|
|
2
|
+
/**
|
|
3
|
+
* S1: agent registry MCP tools (4 tools).
|
|
4
|
+
* register_agent, list_agents, heartbeat, agent_activity.
|
|
5
|
+
*/
|
|
6
|
+
export function registerAgentTools(server, services, mcpLog) {
|
|
7
|
+
const { registry, activityTracker, sseEmitter, mqttBridge } = services;
|
|
8
|
+
server.tool("register_agent", "Register agent as online with module list", {
|
|
9
|
+
agent_id: z.string(),
|
|
10
|
+
name: z.string(),
|
|
11
|
+
modules: z.array(z.string()),
|
|
12
|
+
}, async ({ agent_id, name, modules }) => {
|
|
13
|
+
mcpLog.info({ tool: "register_agent", agent_id, name, module_count: modules.length }, "Tool called");
|
|
14
|
+
const agent = registry.register(agent_id, name, modules);
|
|
15
|
+
sseEmitter.emit("agent_online", { agent_id, name, modules });
|
|
16
|
+
mqttBridge.registerAgent(agent_id, name);
|
|
17
|
+
return { content: [{ type: "text", text: JSON.stringify(agent) }] };
|
|
18
|
+
});
|
|
19
|
+
server.tool("list_agents", "List registered agents", {
|
|
20
|
+
online_only: z.boolean().optional(),
|
|
21
|
+
}, async ({ online_only }) => {
|
|
22
|
+
const agents = online_only ? registry.listOnline() : registry.listAll();
|
|
23
|
+
return { content: [{ type: "text", text: JSON.stringify(agents) }] };
|
|
24
|
+
});
|
|
25
|
+
server.tool("heartbeat", "Update agent activity status and last seen timestamp", {
|
|
26
|
+
agent_id: z.string(),
|
|
27
|
+
current_file: z.string().optional(),
|
|
28
|
+
current_thread: z.string().optional(),
|
|
29
|
+
}, async ({ agent_id, current_file, current_thread }) => {
|
|
30
|
+
registry.heartbeat(agent_id);
|
|
31
|
+
activityTracker.heartbeat(agent_id, {
|
|
32
|
+
currentFile: current_file || null,
|
|
33
|
+
currentThread: current_thread || null,
|
|
34
|
+
});
|
|
35
|
+
const activity = activityTracker.getActivity(agent_id);
|
|
36
|
+
sseEmitter.emit("agent_activity", {
|
|
37
|
+
agent_id, activity_status: activity.activity_status,
|
|
38
|
+
current_file: activity.current_file, current_thread: activity.current_thread,
|
|
39
|
+
});
|
|
40
|
+
return { content: [{ type: "text", text: JSON.stringify(activity) }] };
|
|
41
|
+
});
|
|
42
|
+
server.tool("agent_activity", "Get activity status for all online agents", {}, async () => {
|
|
43
|
+
const activities = activityTracker.listAll({ idleAfterMinutes: 5 });
|
|
44
|
+
return { content: [{ type: "text", text: JSON.stringify(activities) }] };
|
|
45
|
+
});
|
|
46
|
+
}
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
import type { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js";
|
|
2
|
+
import type { CoordinatorServices } from "../server-setup.js";
|
|
3
|
+
import type { Logger } from "../logger.js";
|
|
4
|
+
/**
 * S1 fix (partial): consultation domain tools extracted from server-setup.ts.
 *
 * Originally these 11 tools (announce_work, post_to_thread, propose/approve/
 * contest_resolution, close/cancel_thread, get_thread, get_thread_updates,
 * list_threads, log_action_summary) lived inline in createMcpServer's
 * 420-line body. Extraction here:
 * - reduces server-setup.ts by ~200 lines
 * - groups related tools by domain (= one file per concern)
 * - keeps behavior identical (no signature changes, no SSE/MQTT shape
 *   changes — verified by all existing tests)
 *
 * Other tool groups (agents, files, dependencies, mqtt, status) remain in
 * server-setup.ts under their existing section comments. Splitting them is
 * straightforward following this pattern but kept out of this PR to minimize
 * the diff for reviewers.
 *
 * @param server - MCP server the consultation tools are registered on.
 * @param services - Coordinator service bundle shared by all tool groups.
 * @param mcpLog - Logger used to trace tool invocations.
 */
export declare function registerConsultationTools(server: McpServer, services: CoordinatorServices, mcpLog: Logger): void;
|