mcp-coordinator 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +92 -0
- package/dashboard/Dockerfile +19 -0
- package/dashboard/public/index.html +1178 -0
- package/dist/cli/config.d.ts +14 -0
- package/dist/cli/config.js +58 -0
- package/dist/cli/dashboard.d.ts +2 -0
- package/dist/cli/dashboard.js +14 -0
- package/dist/cli/index.d.ts +2 -0
- package/dist/cli/index.js +13 -0
- package/dist/cli/server/index.d.ts +2 -0
- package/dist/cli/server/index.js +11 -0
- package/dist/cli/server/start.d.ts +2 -0
- package/dist/cli/server/start.js +57 -0
- package/dist/cli/server/status.d.ts +2 -0
- package/dist/cli/server/status.js +60 -0
- package/dist/cli/server/stop.d.ts +2 -0
- package/dist/cli/server/stop.js +59 -0
- package/dist/cli/version.d.ts +1 -0
- package/dist/cli/version.js +22 -0
- package/dist/src/agent-activity.d.ts +27 -0
- package/dist/src/agent-activity.js +70 -0
- package/dist/src/agent-registry.d.ts +10 -0
- package/dist/src/agent-registry.js +38 -0
- package/dist/src/auth.d.ts +22 -0
- package/dist/src/auth.js +91 -0
- package/dist/src/conflict-detector.d.ts +17 -0
- package/dist/src/conflict-detector.js +114 -0
- package/dist/src/consultation.d.ts +75 -0
- package/dist/src/consultation.js +332 -0
- package/dist/src/context-provider.d.ts +14 -0
- package/dist/src/context-provider.js +34 -0
- package/dist/src/database.d.ts +4 -0
- package/dist/src/database.js +194 -0
- package/dist/src/db-adapter.d.ts +15 -0
- package/dist/src/db-adapter.js +1 -0
- package/dist/src/dependency-map.d.ts +7 -0
- package/dist/src/dependency-map.js +76 -0
- package/dist/src/file-tracker.d.ts +21 -0
- package/dist/src/file-tracker.js +44 -0
- package/dist/src/impact-scorer.d.ts +31 -0
- package/dist/src/impact-scorer.js +112 -0
- package/dist/src/index.d.ts +2 -0
- package/dist/src/index.js +26 -0
- package/dist/src/introspection.d.ts +24 -0
- package/dist/src/introspection.js +28 -0
- package/dist/src/logger.d.ts +20 -0
- package/dist/src/logger.js +55 -0
- package/dist/src/mqtt-bridge.d.ts +40 -0
- package/dist/src/mqtt-bridge.js +173 -0
- package/dist/src/mqtt-broker.d.ts +23 -0
- package/dist/src/mqtt-broker.js +99 -0
- package/dist/src/plan-quality.d.ts +11 -0
- package/dist/src/plan-quality.js +30 -0
- package/dist/src/quota/credential-reader.d.ts +21 -0
- package/dist/src/quota/credential-reader.js +86 -0
- package/dist/src/quota/quota-cache.d.ts +93 -0
- package/dist/src/quota/quota-cache.js +177 -0
- package/dist/src/quota/quota.d.ts +47 -0
- package/dist/src/quota/quota.js +117 -0
- package/dist/src/serve-http.d.ts +5 -0
- package/dist/src/serve-http.js +775 -0
- package/dist/src/server-setup.d.ts +34 -0
- package/dist/src/server-setup.js +453 -0
- package/dist/src/sse-emitter.d.ts +10 -0
- package/dist/src/sse-emitter.js +35 -0
- package/dist/src/types.d.ts +121 -0
- package/dist/src/types.js +1 -0
- package/package.json +80 -0
|
@@ -0,0 +1,34 @@
|
|
|
1
|
+
import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js";
import { AgentRegistry } from "./agent-registry.js";
import { Consultation } from "./consultation.js";
import { ConflictDetector } from "./conflict-detector.js";
import { DependencyMapper } from "./dependency-map.js";
import { FileTracker } from "./file-tracker.js";
import { ImpactScorer } from "./impact-scorer.js";
import { SummaryContextProvider } from "./context-provider.js";
import { IntrospectionManager } from "./introspection.js";
import { SseEmitter } from "./sse-emitter.js";
import { MqttBridge } from "./mqtt-bridge.js";
import { AgentActivityTracker } from "./agent-activity.js";
import { QuotaCache } from "./quota/quota-cache.js";
import type { CoordinatorConfig } from "./types.js";
import { type Logger } from "./logger.js";
/**
 * The bundle of process-wide singletons built once by {@link createServices}
 * and shared by every MCP session created via {@link createMcpServer}.
 */
export interface CoordinatorServices {
    /** Root structured logger; components receive `logger.child(...)` loggers. */
    logger: Logger;
    /** Agent registration / online-status store. */
    registry: AgentRegistry;
    /** Per-agent activity (current file/thread, working vs idle) from heartbeats. */
    activityTracker: AgentActivityTracker;
    /** Consultation-thread engine (announce/post/resolve lifecycle). */
    consultation: Consultation;
    /** Detects overlapping work between agents on announce. */
    conflictDetector: ConflictDetector;
    /** Module dependency graph used for blast-radius queries. */
    depMap: DependencyMapper;
    /** Tracks which agents touched which files, per session. */
    fileTracker: FileTracker;
    /** Scores how much an announced change concerns each online agent. */
    impactScorer: ImpactScorer;
    /** Tracks introspection requests issued to gray-zone agents. */
    introspection: IntrospectionManager;
    /** Builds per-agent context summaries handed back to an announcing initiator. */
    contextProvider: SummaryContextProvider;
    /** Persists coordinator events and fans them out to SSE listeners. */
    sseEmitter: SseEmitter;
    /** MQTT publish/subscribe bridge for live agent messaging. */
    mqttBridge: MqttBridge;
    /** Cached API quota info, refreshed while agents are online. */
    quotaCache: QuotaCache;
}
/** Create shared services (once at startup). */
export declare function createServices(config: CoordinatorConfig): CoordinatorServices;
/** Create a new McpServer bound to the shared services (one per MCP session). */
export declare function createMcpServer(services: CoordinatorServices): McpServer;
|
|
@@ -0,0 +1,453 @@
|
|
|
1
|
+
import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js";
|
|
2
|
+
import { z } from "zod";
|
|
3
|
+
import { initDatabase } from "./database.js";
|
|
4
|
+
import { AgentRegistry } from "./agent-registry.js";
|
|
5
|
+
import { Consultation } from "./consultation.js";
|
|
6
|
+
import { ConflictDetector } from "./conflict-detector.js";
|
|
7
|
+
import { DependencyMapper } from "./dependency-map.js";
|
|
8
|
+
import { FileTracker } from "./file-tracker.js";
|
|
9
|
+
import { ImpactScorer } from "./impact-scorer.js";
|
|
10
|
+
import { SummaryContextProvider } from "./context-provider.js";
|
|
11
|
+
import { IntrospectionManager } from "./introspection.js";
|
|
12
|
+
import { SseEmitter } from "./sse-emitter.js";
|
|
13
|
+
import { MqttBridge } from "./mqtt-bridge.js";
|
|
14
|
+
import { assessPlanQuality } from "./plan-quality.js";
|
|
15
|
+
import { AgentActivityTracker } from "./agent-activity.js";
|
|
16
|
+
import { QuotaCache } from "./quota/quota-cache.js";
|
|
17
|
+
import { createLogger } from "./logger.js";
|
|
18
|
+
import { getVersion } from "../cli/version.js";
|
|
19
|
+
// Resolved once at module load; reported as the McpServer's version in createMcpServer.
const VERSION = getVersion();
|
|
20
|
+
/**
 * Create shared services (once at startup).
 *
 * Builds the full service graph — persistence, agent registry, consultation
 * engine, conflict/impact analysis, SSE + MQTT fan-out, quota cache — and
 * wires the cross-service listeners. Call exactly once per process; every MCP
 * session created by `createMcpServer` shares the returned object.
 *
 * @param {import("./types.js").CoordinatorConfig} config - only `config.dataDir`
 *   is read here (location for the SQLite database).
 * @returns {CoordinatorServices} the shared service bundle.
 */
export function createServices(config) {
    // Open/initialize the SQLite database before any service touches it.
    initDatabase(config.dataDir);
    const logger = createLogger();
    const registry = new AgentRegistry();
    const activityTracker = new AgentActivityTracker(registry);
    const consultation = new Consultation(logger.child({ component: "consultation" }));
    const depMap = new DependencyMapper();
    const fileTracker = new FileTracker();
    const impactScorer = new ImpactScorer(registry, fileTracker, consultation);
    const introspection = new IntrospectionManager();
    const conflictDetector = new ConflictDetector(consultation, depMap, fileTracker, logger.child({ component: "conflict" }));
    const contextProvider = new SummaryContextProvider(registry, consultation, fileTracker);
    const sseEmitter = new SseEmitter();
    const mqttBridge = new MqttBridge(logger.child({ component: "mqtt" }));
    // Quota cache — macOS-only for now, Linux/Windows stubs return 503 via the
    // /api/quota handler so raids keep running without a quota guardrail there.
    // onRefresh fans the new data out to dashboard (SSE) + any live listener (MQTT)
    // so the UI reflects quota moves without polling.
    const quotaCache = new QuotaCache({
        logger: logger.child({ component: "quota" }),
        onRefresh: (info) => {
            // camelCase (QuotaCache) → snake_case (SSE wire format).
            sseEmitter.emit("quota_update", {
                five_hour: info.fiveHour,
                seven_day: info.sevenDay,
                seven_day_sonnet: info.sevenDaySonnet,
                fetched_at: info.fetchedAt,
            });
            mqttBridge.publishQuotaUpdate(info);
        },
    });
    // Hybrid refresh: keep the cache warm whenever agents are online. Tap into
    // SSE events for agent_online/agent_offline since they're already emitted by
    // the REST handler on /api/register and the offline hook. Avoids plumbing a
    // dedicated observer through AgentRegistry.
    sseEmitter.addListener((event) => {
        if (event.type === "agent_online")
            quotaCache.onAgentActive();
        else if (event.type === "agent_offline")
            quotaCache.onAgentInactive();
    });
    // Centralized resolution → SSE + MQTT
    consultation.onResolve((event) => {
        sseEmitter.emit("thread_resolved", {
            thread_id: event.thread_id,
            resolution_type: event.resolution_type,
            resolution: event.resolution_summary,
            approved_by: event.approved_by,
            approved_by_name: event.approved_by_name,
            created_at: event.created_at,
            resolved_at: event.resolved_at,
            had_messages: event.had_messages,
        });
        // auto_resolved threads go to SSE only; they are not published over MQTT.
        if (event.resolution_type !== "auto_resolved") {
            mqttBridge.publishResolution(event.thread_id, "resolved", event.resolution_summary || "");
        }
    });
    return {
        logger, registry, activityTracker, consultation, conflictDetector,
        depMap, fileTracker, impactScorer, introspection, contextProvider, sseEmitter, mqttBridge, quotaCache,
    };
}
|
|
82
|
+
/**
 * Create a new McpServer bound to the shared services (one per MCP session).
 *
 * Registers every coordinator tool (agent registry, consultation lifecycle,
 * file tracking, dependency map, status, coordination helpers, MQTT listener
 * tools) against a fresh McpServer instance. All state lives in `services`;
 * the server object itself is session-scoped and cheap to create.
 *
 * @param {CoordinatorServices} services - shared singletons from createServices().
 * @returns {McpServer} a configured server ready to be connected to a transport.
 */
export function createMcpServer(services) {
    const { registry, activityTracker, consultation, conflictDetector, depMap, fileTracker, impactScorer, introspection, contextProvider, sseEmitter, mqttBridge } = services;
    const mcpLog = services.logger.child({ component: "mcp" });
    const server = new McpServer({
        name: "mcp-coordinator-v3",
        version: VERSION,
    });
    // ── AGENT REGISTRY TOOLS ──
    server.tool("register_agent", "Register agent as online with module list", {
        agent_id: z.string(),
        name: z.string(),
        modules: z.array(z.string()),
    }, async ({ agent_id, name, modules }) => {
        mcpLog.info({ tool: "register_agent", agent_id, name, module_count: modules.length }, "Tool called");
        const agent = registry.register(agent_id, name, modules);
        // Registration is broadcast on both channels: SSE for the dashboard,
        // MQTT so peers get a live-topic subscription for this agent.
        sseEmitter.emit("agent_online", { agent_id, name, modules });
        mqttBridge.registerAgent(agent_id, name);
        return { content: [{ type: "text", text: JSON.stringify(agent) }] };
    });
    server.tool("list_agents", "List registered agents", {
        online_only: z.boolean().optional(),
    }, async ({ online_only }) => {
        const agents = online_only ? registry.listOnline() : registry.listAll();
        return { content: [{ type: "text", text: JSON.stringify(agents) }] };
    });
    server.tool("heartbeat", "Update agent activity status and last seen timestamp", {
        agent_id: z.string(),
        current_file: z.string().optional(),
        current_thread: z.string().optional(),
    }, async ({ agent_id, current_file, current_thread }) => {
        // Two trackers: registry records liveness (last_seen), activityTracker
        // records what the agent is working on right now.
        registry.heartbeat(agent_id);
        activityTracker.heartbeat(agent_id, {
            currentFile: current_file || null,
            currentThread: current_thread || null,
        });
        const activity = activityTracker.getActivity(agent_id);
        sseEmitter.emit("agent_activity", {
            agent_id, activity_status: activity.activity_status,
            current_file: activity.current_file, current_thread: activity.current_thread,
        });
        return { content: [{ type: "text", text: JSON.stringify(activity) }] };
    });
    server.tool("agent_activity", "Get activity status for all online agents", {}, async () => {
        const activities = activityTracker.listAll({ idleAfterMinutes: 5 });
        return { content: [{ type: "text", text: JSON.stringify(activities) }] };
    });
    // ── CONSULTATION TOOLS ──
    server.tool("announce_work", "Open a consultation thread before starting work", {
        agent_id: z.string(),
        subject: z.string(),
        plan: z.string().optional(),
        target_modules: z.array(z.string()),
        target_files: z.array(z.string()),
        depends_on_files: z.array(z.string()).optional(),
        exports_affected: z.array(z.string()).optional(),
        keep_open: z.boolean().optional().describe("Keep thread open even if no agents are concerned (for manual coordination like games or debates)"),
        assigned_to: z.string().optional().describe("Directed-dispatch: only this agent_id will be allowed to claim the thread. Use for lead→worker handoffs in maitre/chaine/relais presets. Implies keep_open=true."),
    }, async ({ agent_id, subject, plan, target_modules, target_files, depends_on_files, exports_affected, keep_open, assigned_to }) => {
        mcpLog.info({ tool: "announce_work", agent_id, subject, target_modules, target_files, assigned_to }, "Tool called");
        // Quality gate on plan
        const planQuality = assessPlanQuality(plan);
        const effectiveMode = planQuality.mode;
        const conflicts = conflictDetector.detect({ agent_id, target_modules, target_files });
        const thread = consultation.announceWork({
            agent_id, subject, plan, target_modules, target_files, depends_on_files, exports_affected, keep_open, assigned_to,
        });
        // Store conflicts on thread
        if (conflicts.length > 0) {
            const db = (await import("./database.js")).getDb();
            db.prepare("UPDATE threads SET conflicts = ? WHERE id = ?")
                .run(JSON.stringify(conflicts), thread.id);
        }
        // Impact scoring: categorize all online agents
        const categorized = impactScorer.categorize({
            agent_id, target_modules, target_files, depends_on_files, exports_affected,
        });
        // Override expected_respondents with concerned agents from scorer
        {
            const db = (await import("./database.js")).getDb();
            const concernedIds = categorized.concerned.map(s => s.agent_id);
            db.prepare("UPDATE threads SET expected_respondents = ? WHERE id = ?")
                .run(JSON.stringify(concernedIds), thread.id);
            // Only auto-resolve when truly alone — no other online agents.
            // If peers are online but not yet concerned, keep the thread open so
            // a subsequent announce can still match via Layer 0. Thread timeouts
            // naturally if no one joins.
            const otherOnlineCount = registry.listOnline().filter((a) => a.id !== agent_id).length;
            const shouldAutoResolve = concernedIds.length === 0 && otherOnlineCount === 0;
            if (shouldAutoResolve && thread.status === "open" && !keep_open) {
                db.prepare("UPDATE threads SET status = 'resolved', resolved_at = ? WHERE id = ?")
                    .run(new Date().toISOString(), thread.id);
                consultation.emitResolution(thread.id, "auto_resolved");
            }
        }
        // Emit impact_scored SSE events for all agents
        for (const s of [...categorized.concerned, ...categorized.gray_zone, ...categorized.pass]) {
            // Thresholds mirror the scorer's buckets: >=90 concerned, >=30 gray zone.
            sseEmitter.emit("impact_scored", {
                thread_id: thread.id, agent_id: s.agent_id, agent_name: s.agent_name,
                score: s.score, reasons: s.reasons, category: s.score >= 90 ? "concerned" : s.score >= 30 ? "gray_zone" : "pass",
            });
        }
        // Create introspection records and emit introspection_requested for gray_zone agents
        for (const s of categorized.gray_zone) {
            introspection.create({ thread_id: thread.id, agent_id: s.agent_id, score: s.score, reasons: s.reasons });
            sseEmitter.emit("introspection_requested", {
                thread_id: thread.id, agent_id: s.agent_id, agent_name: s.agent_name, score: s.score, reasons: s.reasons,
            });
        }
        // Emit downgrade event when plan is provided but quality is insufficient
        if (plan && effectiveMode === "discovery") {
            sseEmitter.emit("impact_scored", {
                thread_id: thread.id,
                agent_id: agent_id,
                agent_name: registry.get(agent_id)?.name || agent_id,
                score: planQuality.score,
                reasons: [`plan downgraded: score ${planQuality.score}/3 — ${!planQuality.checks.mentions_files ? 'no files' : ''} ${!planQuality.checks.concrete_approach ? 'vague approach' : ''} ${!planQuality.checks.sufficient_detail ? 'too short' : ''}`.trim()],
                category: "plan_quality",
            });
        }
        // Re-read the thread: the direct DB writes above are not reflected in
        // the `thread` object returned by announceWork.
        const updated = consultation.getThread(thread.id);
        const respondents = JSON.parse(updated.expected_respondents || "[]");
        sseEmitter.emit("thread_opened", {
            thread_id: thread.id, initiator: agent_id, subject, target_modules, conflicts,
            expected_respondents: respondents,
            mode: effectiveMode,
            plan: plan || null,
            plan_quality: planQuality,
        });
        mqttBridge.publishConsultation(thread.id, agent_id, subject, target_modules);
        // Gather context from concerned agents for the initiator
        const contextForInitiator = respondents.map((rid) => contextProvider.getRelevantContext(rid, { thread_id: updated.id, subject, target_modules, target_files })).filter((ctx) => ctx.modules.length > 0);
        return {
            content: [{
                    type: "text",
                    text: JSON.stringify({ thread: updated, conflicts, context: contextForInitiator, impact: categorized }),
                }],
        };
    });
    server.tool("post_to_thread", "Post a message to a consultation thread", {
        thread_id: z.string(),
        agent_id: z.string(),
        agent_name: z.string().optional(),
        type: z.enum(["context", "suggestion", "warning"]),
        content: z.string(),
        context_snapshot: z.string().optional(),
        in_reply_to: z.string().optional(),
    }, async ({ thread_id, agent_id, agent_name, type, content, context_snapshot, in_reply_to }) => {
        mcpLog.info({ tool: "post_to_thread", thread_id, agent_id, type }, "Tool called");
        const msg = consultation.postToThread({
            thread_id, agent_id, agent_name, type, content, context_snapshot, in_reply_to,
        });
        const thread = consultation.getThread(thread_id);
        sseEmitter.emit("message_posted", {
            thread_id, agent_id, agent_name: agent_name || agent_id,
            type, content, round: thread?.round || 1,
            token_estimate: msg.token_estimate || 0,
        });
        mqttBridge.publishMessage(thread_id, agent_id, type, content);
        return { content: [{ type: "text", text: JSON.stringify(msg) }] };
    });
    server.tool("propose_resolution", "Propose a resolution for the consultation", {
        thread_id: z.string(),
        agent_id: z.string(),
        summary: z.string(),
        plan: z.string().optional(),
    }, async ({ thread_id, agent_id, summary, plan }) => {
        mcpLog.info({ tool: "propose_resolution", thread_id, agent_id }, "Tool called");
        consultation.proposeResolution(thread_id, agent_id, summary);
        sseEmitter.emit("resolution_proposed", { thread_id, agent_id, summary });
        mqttBridge.publishResolution(thread_id, "resolving", summary);
        const thread = consultation.getThread(thread_id);
        return { content: [{ type: "text", text: JSON.stringify(thread) }] };
    });
    server.tool("approve_resolution", "Approve the proposed resolution", {
        thread_id: z.string(),
        agent_id: z.string(),
    }, async ({ thread_id, agent_id }) => {
        mcpLog.info({ tool: "approve_resolution", thread_id, agent_id }, "Tool called");
        // Look up the display name so the resolution records who approved it.
        const agentInfo = registry.get(agent_id);
        consultation.approveResolution(thread_id, agent_id, agentInfo?.name);
        const thread = consultation.getThread(thread_id);
        return { content: [{ type: "text", text: JSON.stringify(thread) }] };
    });
    server.tool("contest_resolution", "Contest the proposed resolution", {
        thread_id: z.string(),
        agent_id: z.string(),
        reason: z.string(),
    }, async ({ thread_id, agent_id, reason }) => {
        mcpLog.info({ tool: "contest_resolution", thread_id, agent_id }, "Tool called");
        consultation.contestResolution(thread_id, agent_id, reason);
        const thread = consultation.getThread(thread_id);
        return { content: [{ type: "text", text: JSON.stringify(thread) }] };
    });
    server.tool("close_thread", "Close a consultation thread", {
        thread_id: z.string(),
        agent_id: z.string(),
        summary: z.string(),
    }, async ({ thread_id, agent_id, summary }) => {
        mcpLog.info({ tool: "close_thread", thread_id, agent_id }, "Tool called");
        consultation.closeThread(thread_id, agent_id, summary);
        return { content: [{ type: "text", text: "closed" }] };
    });
    server.tool("cancel_thread", "Cancel a consultation thread", {
        thread_id: z.string(),
        agent_id: z.string(),
        reason: z.string().optional(),
    }, async ({ thread_id, agent_id, reason }) => {
        mcpLog.info({ tool: "cancel_thread", thread_id, agent_id }, "Tool called");
        consultation.cancelThread(thread_id, agent_id, reason);
        sseEmitter.emit("thread_cancelled", { thread_id, reason });
        return { content: [{ type: "text", text: "cancelled" }] };
    });
    server.tool("get_thread", "Get a thread with all messages", {
        thread_id: z.string(),
    }, async ({ thread_id }) => {
        const result = consultation.getThreadWithMessages(thread_id);
        mcpLog.debug({ tool: "get_thread", thread_id, message_count: result?.messages.length }, "Tool called");
        return { content: [{ type: "text", text: JSON.stringify(result) }] };
    });
    server.tool("get_thread_updates", "Get new messages since timestamp", {
        agent_id: z.string(),
        since: z.string().optional(),
    }, async ({ agent_id, since }) => {
        const updates = consultation.getThreadUpdates(agent_id, since);
        return { content: [{ type: "text", text: JSON.stringify(updates) }] };
    });
    server.tool("list_threads", "List consultation threads", {
        status: z.string().optional(),
        agent_id: z.string().optional(),
        module: z.string().optional(),
        assigned_to_me: z.string().optional().describe("Filter to threads claimable by this agent_id: open pool (assigned_to NULL) OR directed to me. Use for worker agents receiving directed dispatches."),
    }, async ({ status, agent_id, module, assigned_to_me }) => {
        const threads = consultation.listThreads({ status, agent_id, module, assigned_to_me });
        mcpLog.debug({ tool: "list_threads", status, agent_id, module, assigned_to_me, result_count: threads.length }, "Tool called");
        return { content: [{ type: "text", text: JSON.stringify(threads) }] };
    });
    server.tool("log_action_summary", "Log a one-liner summary of an action", {
        session_id: z.string(),
        agent_id: z.string(),
        file_path: z.string().optional(),
        summary: z.string(),
    }, async ({ session_id, agent_id, file_path, summary }) => {
        const result = consultation.logActionSummary({ session_id, agent_id, file_path, summary });
        sseEmitter.emit("action_summary", { agent_id, file_path, summary });
        return { content: [{ type: "text", text: JSON.stringify(result) }] };
    });
    // ── FILE TRACKING TOOLS ──
    server.tool("hot_files", "List files modified by multiple agents", {
        since_minutes: z.number().optional(),
    }, async ({ since_minutes }) => {
        // Default lookback window: 30 minutes.
        const files = fileTracker.getHotFiles(since_minutes || 30);
        return { content: [{ type: "text", text: JSON.stringify(files) }] };
    });
    server.tool("get_session_files", "Get files modified in a session", {
        session_id: z.string(),
    }, async ({ session_id }) => {
        const files = fileTracker.getBySession(session_id);
        return { content: [{ type: "text", text: JSON.stringify(files) }] };
    });
    server.tool("check_file_conflict", "Check if another agent is editing a file", {
        file_path: z.string(),
        agent_id: z.string(),
        within_minutes: z.number().optional(),
    }, async ({ file_path, agent_id, within_minutes }) => {
        const result = fileTracker.checkFileConflict(file_path, agent_id, within_minutes || 30);
        return { content: [{ type: "text", text: JSON.stringify(result) }] };
    });
    // ── DEPENDENCY MAP TOOLS ──
    server.tool("set_dependency_map", "Load module dependency graph", {
        modules: z.string(), // JSON DependencyMap
    }, async ({ modules }) => {
        // NOTE(review): JSON.parse throws on malformed input — the error surfaces
        // to the caller as a tool failure rather than being caught here.
        const map = JSON.parse(modules);
        depMap.setMap(map);
        return { content: [{ type: "text", text: "ok" }] };
    });
    server.tool("get_blast_radius", "Calculate impact of changes to a module", {
        module_id: z.string(),
    }, async ({ module_id }) => {
        const radius = depMap.getBlastRadius(module_id);
        return { content: [{ type: "text", text: JSON.stringify(radius) }] };
    });
    server.tool("get_module_info", "Get module dependency info", {
        module_id: z.string(),
    }, async ({ module_id }) => {
        const info = depMap.getModuleInfo(module_id);
        return { content: [{ type: "text", text: JSON.stringify(info) }] };
    });
    // ── STATUS TOOL ──
    server.tool("coordinator_status", "Full system status", {}, async () => {
        const online = registry.listOnline();
        const openThreads = consultation.listThreads({ status: "open" });
        const resolvingThreads = consultation.listThreads({ status: "resolving" });
        const hotFiles = fileTracker.getHotFiles(30);
        const status = {
            agents_online: online.length,
            // `modules` is stored as a JSON string on the agent row; decode for output.
            agents: online.map((a) => ({ id: a.id, name: a.name, modules: JSON.parse(a.modules) })),
            open_threads: openThreads.length,
            resolving_threads: resolvingThreads.length,
            hot_files: hotFiles.length,
            mqtt_connected: mqttBridge.isConnected(),
        };
        mcpLog.debug({ tool: "coordinator_status", agents_online: online.length, open_threads: openThreads.length }, "Tool called");
        return { content: [{ type: "text", text: JSON.stringify(status) }] };
    });
    // ── COORDINATION HELPERS ──
    server.tool("wait_for_peers", "Block until at least N other online agents are registered, or timeout. Use before the first announce_work to avoid the race where one agent announces before peers have booted.", {
        agent_id: z.string(),
        min_peers: z.number().optional(),
        timeout_seconds: z.number().optional(),
    }, async ({ agent_id, min_peers, timeout_seconds }) => {
        // Defaults: wait for 1 peer, up to 30 seconds, polling every second.
        const targetPeers = min_peers ?? 1;
        const timeoutMs = (timeout_seconds ?? 30) * 1000;
        const pollIntervalMs = 1000;
        const startedAt = Date.now();
        mcpLog.info({ tool: "wait_for_peers", agent_id, min_peers: targetPeers, timeout_seconds: timeoutMs / 1000 }, "Tool called");
        while (Date.now() - startedAt < timeoutMs) {
            const peers = registry.listOnline().filter((a) => a.id !== agent_id);
            if (peers.length >= targetPeers) {
                return {
                    content: [{
                            type: "text",
                            text: JSON.stringify({
                                ready: true,
                                online_peers: peers.map((p) => ({ id: p.id, name: p.name })),
                                waited_ms: Date.now() - startedAt,
                            }),
                        }],
                };
            }
            await new Promise((r) => setTimeout(r, pollIntervalMs));
        }
        // Timed out: report whoever is online now so the caller can decide.
        const finalPeers = registry.listOnline().filter((a) => a.id !== agent_id);
        return {
            content: [{
                    type: "text",
                    text: JSON.stringify({
                        ready: false,
                        timeout: true,
                        online_peers: finalPeers.map((p) => ({ id: p.id, name: p.name })),
                        waited_ms: Date.now() - startedAt,
                    }),
                }],
        };
    });
    // ── MQTT LISTENER TOOLS (replaces standalone mqtt-mcp-bridge) ──
    server.tool("wait_for_message", "Block until an MQTT consultation message arrives or timeout", {
        agent_id: z.string(),
        timeout_seconds: z.number().optional(),
    }, async ({ agent_id, timeout_seconds }) => {
        const timeoutMs = (timeout_seconds || 15) * 1000;
        const msg = await mqttBridge.waitForMessage(agent_id, timeoutMs);
        if (msg) {
            return { content: [{ type: "text", text: JSON.stringify(msg) }] };
        }
        return { content: [{ type: "text", text: JSON.stringify({ timeout: true }) }] };
    });
    server.tool("get_queued_messages", "Get all queued MQTT messages without blocking", {
        agent_id: z.string(),
    }, async ({ agent_id }) => {
        const messages = mqttBridge.getQueuedMessages(agent_id);
        return { content: [{ type: "text", text: JSON.stringify(messages) }] };
    });
    server.tool("mqtt_publish", "Publish a message to an MQTT topic", {
        topic: z.string(),
        payload: z.string(),
    }, async ({ topic, payload }) => {
        mqttBridge.mqttPublish(topic, payload);
        return { content: [{ type: "text", text: "published" }] };
    });
    return server;
}
|
|
@@ -0,0 +1,10 @@
|
|
|
1
|
+
import type { CoordinatorEvent, EventType } from "./types.js";
/** Callback invoked synchronously for each event emitted in this process. */
type EventListener = (event: CoordinatorEvent) => void;
/**
 * Coordinator event hub: every emitted event is persisted (retrievable via
 * {@link SseEmitter.getEventsSince}) and fanned out to in-process listeners.
 */
export declare class SseEmitter {
    private listeners;
    /** Persist an event row and notify all registered listeners. */
    emit(type: EventType, payload: Record<string, unknown>): void;
    /** Events with id > lastId, in ascending id order (stream catch-up). */
    getEventsSince(lastId: number): CoordinatorEvent[];
    /** Register a listener; the returned function unsubscribes it. */
    addListener(listener: EventListener): () => void;
    /** Drop every registered listener. */
    removeAllListeners(): void;
}
export {};
|
|
@@ -0,0 +1,35 @@
|
|
|
1
|
+
import { getDb } from "./database.js";
|
|
2
|
+
/**
 * Coordinator event hub: every emitted event is written to the `events`
 * table and fanned out synchronously to in-process listeners.
 */
export class SseEmitter {
    listeners = [];
    /**
     * Persist one event row, then notify every registered listener with the
     * stored representation (payload kept as its JSON string form).
     */
    emit(type, payload) {
        const serialized = JSON.stringify(payload);
        const inserted = getDb()
            .prepare("INSERT INTO events (type, payload) VALUES (?, ?)")
            .run(type, serialized);
        const record = {
            id: inserted.lastInsertRowid,
            type,
            payload: serialized,
            created_at: new Date().toISOString(),
        };
        for (const notify of this.listeners) {
            notify(record);
        }
    }
    /** Return all events with id > lastId, oldest first (stream catch-up). */
    getEventsSince(lastId) {
        const query = getDb().prepare("SELECT * FROM events WHERE id > ? ORDER BY id");
        return query.all(lastId);
    }
    /**
     * Register a listener for future emits.
     * @returns an unsubscribe function that removes the listener again.
     */
    addListener(listener) {
        this.listeners.push(listener);
        return () => {
            this.listeners = this.listeners.filter((registered) => registered !== listener);
        };
    }
    /** Drop every registered listener. */
    removeAllListeners() {
        this.listeners = [];
    }
}
|
|
@@ -0,0 +1,121 @@
|
|
|
1
|
+
/** Connection state of a registered agent. */
export type AgentConnectionStatus = "online" | "offline";
/** Fine-grained activity state of an agent, beyond mere connectivity. */
export type ActivityStatus = "working" | "idle" | "waiting" | "offline";
/** A registered coordination agent (row shape of the agents table). */
export interface Agent {
    id: string;
    name: string;
    // Stored as a single string — presumably a JSON/CSV-encoded module list;
    // confirm against agent-registry.
    modules: string;
    status: AgentConnectionStatus;
    // Timestamps are strings (SseEmitter writes ISO 8601; assume same here).
    registered_at: string;
    last_seen_at: string;
}
/** Latest reported activity for one agent. */
export interface AgentActivity {
    agent_id: string;
    activity_status: ActivityStatus;
    /** File the agent is currently editing, if any. */
    current_file: string | null;
    /** Consultation thread the agent is currently engaged in, if any. */
    current_thread: string | null;
    last_activity_at: string;
}
/** Lifecycle state of a consultation thread. */
export type ThreadStatus = "open" | "resolving" | "resolved" | "cancelled" | "poisoned";
/** How a thread reached its terminal state. */
export type ResolutionType = "consensus" | "auto_resolved" | "timeout" | "closed" | "max_rounds" | "agent_departure";
/** A consultation thread between agents. */
export interface Thread {
    id: string;
    /** Agent that opened the thread. */
    initiator_id: string;
    subject: string;
    plan: string | null;
    // Stored as strings — presumably serialized lists; confirm encoding.
    target_modules: string;
    target_files: string;
    status: ThreadStatus;
    resolution_summary: string | null;
    conflicts: string | null;
    /** Current discussion round (bounded by max_rounds). */
    round: number;
    max_rounds: number;
    timeout_seconds: number;
    created_at: string;
    resolved_at: string | null;
    expected_respondents: string | null;
    depends_on_files: string | null;
    exports_affected: string | null;
    /** Agent currently claiming the task, if any. */
    claimed_by: string | null;
    claimed_at: string | null;
    /** Optional fields — presumably added in a later schema revision; confirm. */
    unclaim_count?: number | null;
    assigned_to?: string | null;
}
/** Kind of message posted into a thread. */
export type MessageType = "context" | "suggestion" | "warning" | "resolution" | "approve" | "contest";
/** One message within a consultation thread. */
export interface ThreadMessage {
    id: string;
    thread_id: string;
    agent_id: string;
    agent_name: string | null;
    type: MessageType;
    content: string;
    context_snapshot: string | null;
    /** Id of the message this replies to, if threaded. */
    in_reply_to: string | null;
    round: number;
    token_estimate: number;
    created_at: string;
}
/** A short summary of an action an agent performed in a session. */
export interface ActionSummary {
    id: string;
    session_id: string;
    agent_id: string;
    file_path: string | null;
    summary: string;
    created_at: string;
}
/** Every event type the coordinator can emit over the event stream. */
export type EventType = "agent_online" | "agent_offline" | "thread_opened" | "message_posted" | "resolution_proposed" | "thread_resolved" | "thread_cancelled" | "file_edited" | "action_summary" | "impact_scored" | "introspection_requested" | "introspection_completed" | "agent_activity" | "task_claimed" | "token_usage" | "quota_update";
/** A stored/streamed coordinator event (see SseEmitter). */
export interface CoordinatorEvent {
    /** Database rowid; absent before the event is persisted. */
    id?: number;
    type: EventType;
    /** JSON-stringified payload (SseEmitter stores the serialized form). */
    payload: string;
    created_at?: string;
}
/** A detected potential conflict between agents' work. */
export interface ConflictReport {
    type: "module_overlap" | "api_contract" | "file_overlap" | "dependency_chain";
    severity: "warning" | "info";
    agent_id: string;
    agent_name: string;
    description: string;
    details: string;
}
/** Impact radius of a change to one module. */
export interface BlastRadius {
    module_id: string;
    direct_dependents: string[];
    indirect_dependents: string[];
    affected_exports: string[];
    /** Open threads whose targets fall inside the radius. */
    active_threads_in_radius: Thread[];
}
/** Static metadata about one module in the dependency map. */
export interface ModuleInfo {
    module_id: string;
    depends_on: string[];
    exports: string[];
    owners: string[];
}
/** Module id -> metadata for the whole project. */
export type DependencyMap = Record<string, ModuleInfo>;
/** One recorded file-touching tool invocation. */
export interface FileActivity {
    id?: number;
    session_id: string;
    agent_id: string;
    agent_name?: string;
    tool_name: string;
    file_path: string;
    module: string;
    created_at?: string;
}
/** Aggregated recent context for a single agent. */
export interface AgentContext {
    agent_id: string;
    modules: string[];
    recent_files: string[];
    action_summaries: ActionSummary[];
}
/** Announcement broadcast when a consultation thread is opened. */
export interface ConsultationAnnounce {
    thread_id: string;
    subject: string;
    target_modules: string[];
    target_files: string[];
}
/** Top-level coordinator configuration. */
export interface CoordinatorConfig {
    dataDir: string;
    authEnabled?: boolean;
    jwtSecret?: string;
    /** Token lifetime — presumably a duration string (e.g. "1h"); confirm against auth. */
    jwtExpiry?: string;
}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
export {};
|