@rigour-labs/mcp 2.17.2 → 2.18.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +15 -0
- package/dist/index.js +493 -0
- package/dist/index.test.d.ts +1 -0
- package/dist/index.test.js +254 -0
- package/dist/supervisor.test.d.ts +1 -0
- package/dist/supervisor.test.js +128 -0
- package/package.json +4 -3
- package/src/index.test.ts +333 -0
- package/src/index.ts +554 -0
- package/src/supervisor.test.ts +158 -0
|
@@ -0,0 +1,254 @@
|
|
|
1
|
+
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
|
|
2
|
+
import * as fs from 'fs-extra';
|
|
3
|
+
import * as path from 'path';
|
|
4
|
+
import * as os from 'os';
|
|
5
|
+
// Mock the MCP tool handlers for testing
|
|
6
|
+
// In a real scenario, we'd refactor index.ts to export testable functions
|
|
7
|
+
describe('MCP Frontier Tools', () => {
|
|
8
|
+
let testDir;
|
|
9
|
+
let rigourDir;
|
|
10
|
+
beforeEach(async () => {
|
|
11
|
+
testDir = fs.mkdtempSync(path.join(os.tmpdir(), 'mcp-test-'));
|
|
12
|
+
rigourDir = path.join(testDir, '.rigour');
|
|
13
|
+
await fs.ensureDir(rigourDir);
|
|
14
|
+
});
|
|
15
|
+
afterEach(async () => {
|
|
16
|
+
await fs.remove(testDir);
|
|
17
|
+
});
|
|
18
|
+
describe('rigour_agent_register', () => {
  // Shared multi-agent session file inside the per-test .rigour directory.
  const sessionPath = () => path.join(rigourDir, 'agent-session.json');

  /**
   * Register (or re-register) an agent in the shared session file.
   * Mirrors the rigour_agent_register tool handler's logic locally so it can
   * be unit-tested without the MCP server (see note at top of file).
   *
   * @param {string} agentId - Unique agent identifier.
   * @param {string[]} taskScope - Glob-like scope patterns the agent claims.
   * @returns {Promise<{session: object, conflicts: string[]}>} The persisted
   *   session plus human-readable conflict messages (may be empty).
   */
  async function registerAgent(agentId, taskScope) {
    // Start with a fresh session; replaced wholesale if one already exists on disk.
    let session = { agents: [], startedAt: new Date().toISOString() };
    if (await fs.pathExists(sessionPath())) {
      session = JSON.parse(await fs.readFile(sessionPath(), 'utf-8'));
    }
    const existingIdx = session.agents.findIndex((a) => a.agentId === agentId);
    if (existingIdx >= 0) {
      // Re-registration: keep the original registeredAt, refresh scope + checkpoint.
      session.agents[existingIdx] = {
        agentId,
        taskScope,
        registeredAt: session.agents[existingIdx].registeredAt,
        lastCheckpoint: new Date().toISOString(),
      };
    }
    else {
      session.agents.push({
        agentId,
        taskScope,
        registeredAt: new Date().toISOString(),
        lastCheckpoint: new Date().toISOString(),
      });
    }
    // Check for scope conflicts
    // NOTE(review): conflict detection is exact string equality of scope
    // patterns, not glob-overlap analysis — presumably matching the real
    // handler's behavior; confirm against src/index.ts.
    const conflicts = [];
    for (const agent of session.agents) {
      if (agent.agentId !== agentId) {
        for (const scope of taskScope) {
          if (agent.taskScope.includes(scope)) {
            conflicts.push(`${agent.agentId} also claims "${scope}"`);
          }
        }
      }
    }
    // Persist even when conflicts exist: registration succeeds, conflicts are advisory.
    await fs.writeFile(sessionPath(), JSON.stringify(session, null, 2));
    return { session, conflicts };
  }

  it('should register a new agent', async () => {
    const { session, conflicts } = await registerAgent('agent-a', ['src/api/**']);
    expect(session.agents).toHaveLength(1);
    expect(session.agents[0].agentId).toBe('agent-a');
    expect(conflicts).toHaveLength(0);
  });

  it('should detect scope conflicts', async () => {
    await registerAgent('agent-a', ['src/api/**', 'src/utils/**']);
    const { conflicts } = await registerAgent('agent-b', ['src/api/**']);
    expect(conflicts).toHaveLength(1);
    expect(conflicts[0]).toContain('agent-a');
  });

  it('should update existing agent registration', async () => {
    await registerAgent('agent-a', ['src/api/**']);
    const { session } = await registerAgent('agent-a', ['src/api/**', 'tests/**']);
    // Still one entry: re-registration replaces, never duplicates.
    expect(session.agents).toHaveLength(1);
    expect(session.agents[0].taskScope).toContain('tests/**');
  });

  it('should support multiple agents', async () => {
    await registerAgent('agent-a', ['src/frontend/**']);
    await registerAgent('agent-b', ['src/backend/**']);
    const { session } = await registerAgent('agent-c', ['src/shared/**']);
    expect(session.agents).toHaveLength(3);
  });
});
|
|
81
|
+
describe('rigour_checkpoint', () => {
  // Checkpoint history file inside the per-test .rigour directory.
  const checkpointPath = () => path.join(rigourDir, 'checkpoint-session.json');

  /**
   * Append a progress checkpoint to the session file, emitting warnings for
   * low quality and for quality drift. Mirrors the rigour_checkpoint tool
   * handler locally for testability.
   *
   * @param {number} progressPct - Reported task progress (0-100).
   * @param {number} qualityScore - Reported quality (0-100); < 80 warns.
   * @param {string} [summary='Test'] - Free-text checkpoint summary.
   * @returns {Promise<{checkpoint: object, warnings: string[], session: object}>}
   */
  async function recordCheckpoint(progressPct, qualityScore, summary = 'Test') {
    let session = {
      sessionId: `chk-session-${Date.now()}`,
      startedAt: new Date().toISOString(),
      checkpoints: [],
      status: 'active'
    };
    if (await fs.pathExists(checkpointPath())) {
      session = JSON.parse(await fs.readFile(checkpointPath(), 'utf-8'));
    }
    const warnings = [];
    // Fixed quality floor of 80% — matches the threshold named in the message.
    if (qualityScore < 80) {
      warnings.push(`Quality score ${qualityScore}% is below threshold 80%`);
    }
    // Drift detection
    // Compares the incoming score against the average of up to the last 3
    // PRIOR checkpoints (the new one is pushed afterwards); a drop of more
    // than 10 points below that average counts as drift.
    if (session.checkpoints.length >= 2) {
      const recentScores = session.checkpoints.slice(-3).map((cp) => cp.qualityScore);
      const avgRecent = recentScores.reduce((a, b) => a + b, 0) / recentScores.length;
      if (qualityScore < avgRecent - 10) {
        warnings.push(`Drift detected: quality dropped from avg ${avgRecent.toFixed(0)}% to ${qualityScore}%`);
      }
    }
    // NOTE(review): checkpointId derives from Date.now(); two checkpoints in
    // the same millisecond would collide — harmless for these tests.
    const checkpoint = {
      checkpointId: `cp-${Date.now()}`,
      timestamp: new Date().toISOString(),
      progressPct,
      summary,
      qualityScore,
      warnings,
    };
    session.checkpoints.push(checkpoint);
    await fs.writeFile(checkpointPath(), JSON.stringify(session, null, 2));
    return { checkpoint, warnings, session };
  }

  it('should record a checkpoint', async () => {
    const { checkpoint, session } = await recordCheckpoint(25, 85, 'Initial work');
    expect(checkpoint.progressPct).toBe(25);
    expect(checkpoint.qualityScore).toBe(85);
    expect(session.checkpoints).toHaveLength(1);
  });

  it('should warn on low quality score', async () => {
    const { warnings } = await recordCheckpoint(50, 65, 'Struggling');
    expect(warnings.some(w => w.includes('below threshold'))).toBe(true);
  });

  it('should detect quality drift', async () => {
    // Three healthy checkpoints (avg ~87.7), then a 70 — > 10-point drop.
    await recordCheckpoint(20, 90);
    await recordCheckpoint(40, 88);
    await recordCheckpoint(60, 85);
    const { warnings } = await recordCheckpoint(80, 70);
    expect(warnings.some(w => w.includes('Drift detected'))).toBe(true);
  });

  it('should track multiple checkpoints', async () => {
    await recordCheckpoint(25, 90);
    await recordCheckpoint(50, 88);
    await recordCheckpoint(75, 92);
    const { session } = await recordCheckpoint(100, 95);
    expect(session.checkpoints).toHaveLength(4);
  });
});
|
|
142
|
+
describe('rigour_handoff', () => {
  // Handoff log is an append-only JSON Lines file under the test .rigour dir.
  const handoffPath = () => path.join(rigourDir, 'handoffs.jsonl');

  /**
   * Append a new pending handoff record to the JSONL log and return it.
   * Local mirror of the rigour_handoff tool handler's persistence step.
   *
   * @param {string} fromAgentId - Agent handing the task off.
   * @param {string} toAgentId - Intended recipient agent.
   * @param {string} taskDescription - What is being handed over.
   * @param {string[]} [filesInScope=[]] - Files the task touches.
   * @returns {Promise<object>} The record that was appended.
   */
  async function createHandoff(fromAgentId, toAgentId, taskDescription, filesInScope = []) {
    const record = {
      handoffId: `handoff-${Date.now()}`,
      timestamp: new Date().toISOString(),
      fromAgentId,
      toAgentId,
      taskDescription,
      filesInScope,
      status: 'pending',
    };
    const line = `${JSON.stringify(record)}\n`;
    await fs.appendFile(handoffPath(), line);
    return record;
  }

  it('should create a handoff record', async () => {
    const result = await createHandoff('agent-a', 'agent-b', 'Complete API integration');
    expect(result.fromAgentId).toBe('agent-a');
    expect(result.toAgentId).toBe('agent-b');
    expect(result.status).toBe('pending');
  });

  it('should include files in scope', async () => {
    const scoped = ['tests/api.test.ts', 'tests/utils.test.ts'];
    const result = await createHandoff('agent-a', 'agent-b', 'Fix tests', scoped);
    expect(result.filesInScope).toHaveLength(2);
  });

  it('should append multiple handoffs', async () => {
    await createHandoff('agent-a', 'agent-b', 'Task 1');
    await createHandoff('agent-b', 'agent-c', 'Task 2');
    const raw = await fs.readFile(handoffPath(), 'utf-8');
    const entries = raw.trim().split('\n');
    expect(entries).toHaveLength(2);
  });
});
|
|
175
|
+
describe('rigour_agent_deregister', () => {
  // Same session file the register tool writes.
  const sessionPath = () => path.join(rigourDir, 'agent-session.json');

  /**
   * Remove an agent from the shared session file.
   * Returns { success: false, message } when no session file exists; otherwise
   * `success` reflects whether an entry was actually removed.
   *
   * @param {string} agentId - Agent to remove.
   * @returns {Promise<{success: boolean, message?: string, remainingAgents?: number}>}
   */
  async function deregisterAgent(agentId) {
    const hasSession = await fs.pathExists(sessionPath());
    if (!hasSession) {
      return { success: false, message: 'No active session' };
    }
    const session = JSON.parse(await fs.readFile(sessionPath(), 'utf-8'));
    const countBefore = session.agents.length;
    session.agents = session.agents.filter((entry) => entry.agentId !== agentId);
    await fs.writeFile(sessionPath(), JSON.stringify(session, null, 2));
    const removedSomething = session.agents.length < countBefore;
    return {
      success: removedSomething,
      remainingAgents: session.agents.length,
    };
  }

  it('should remove an agent from session', async () => {
    // First register
    const seeded = { agents: [{ agentId: 'agent-a', taskScope: [] }], startedAt: new Date().toISOString() };
    await fs.writeFile(sessionPath(), JSON.stringify(seeded));
    const outcome = await deregisterAgent('agent-a');
    expect(outcome.success).toBe(true);
    expect(outcome.remainingAgents).toBe(0);
  });

  it('should handle non-existent agent', async () => {
    const seeded = { agents: [{ agentId: 'agent-a', taskScope: [] }], startedAt: new Date().toISOString() };
    await fs.writeFile(sessionPath(), JSON.stringify(seeded));
    const outcome = await deregisterAgent('agent-b');
    expect(outcome.success).toBe(false);
  });
});
|
|
205
|
+
describe('rigour_handoff_accept', () => {
  // Same JSONL log the handoff tool appends to.
  const handoffPath = () => path.join(rigourDir, 'handoffs.jsonl');

  /**
   * Accept a pending handoff by id, on behalf of a specific agent.
   * Only the intended recipient (toAgentId) may accept. On success the whole
   * JSONL file is rewritten with the updated record in place.
   *
   * @param {string} handoffId - Id of the handoff to accept.
   * @param {string} agentId - Agent attempting to accept.
   * @returns {Promise<{success: boolean, message?: string, handoff?: object}>}
   */
  async function acceptHandoff(handoffId, agentId) {
    if (!await fs.pathExists(handoffPath())) {
      return { success: false, message: 'No handoffs found' };
    }
    // Parse every JSONL line; trim() drops the trailing newline's empty entry.
    const content = await fs.readFile(handoffPath(), 'utf-8');
    const handoffs = content.trim().split('\n').map(line => JSON.parse(line));
    const handoff = handoffs.find(h => h.handoffId === handoffId);
    if (!handoff) {
      return { success: false, message: 'Handoff not found' };
    }
    if (handoff.toAgentId !== agentId) {
      return { success: false, message: 'Agent not the intended recipient' };
    }
    // Mutate the in-memory record, then rewrite the whole file below.
    handoff.status = 'accepted';
    handoff.acceptedAt = new Date().toISOString();
    // Rewrite the file with updated handoff
    // NOTE(review): no cross-process locking here — presumably acceptable for
    // the single-process test scenario; the real handler may differ.
    const updatedContent = handoffs.map(h => JSON.stringify(h)).join('\n') + '\n';
    await fs.writeFile(handoffPath(), updatedContent);
    return { success: true, handoff };
  }

  it('should accept a pending handoff', async () => {
    const handoff = {
      handoffId: 'handoff-123',
      fromAgentId: 'agent-a',
      toAgentId: 'agent-b',
      taskDescription: 'Test task',
      status: 'pending',
    };
    await fs.writeFile(handoffPath(), JSON.stringify(handoff) + '\n');
    const result = await acceptHandoff('handoff-123', 'agent-b');
    expect(result.success).toBe(true);
    expect(result.handoff?.status).toBe('accepted');
  });

  it('should reject if agent is not recipient', async () => {
    const handoff = {
      handoffId: 'handoff-123',
      fromAgentId: 'agent-a',
      toAgentId: 'agent-b',
      taskDescription: 'Test task',
      status: 'pending',
    };
    await fs.writeFile(handoffPath(), JSON.stringify(handoff) + '\n');
    // agent-c is neither sender nor recipient.
    const result = await acceptHandoff('handoff-123', 'agent-c');
    expect(result.success).toBe(false);
    expect(result.message).toContain('not the intended recipient');
  });
});
|
|
254
|
+
});
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
export {};
|
|
@@ -0,0 +1,128 @@
|
|
|
1
|
+
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
|
|
2
|
+
import fs from 'fs-extra';
|
|
3
|
+
import path from 'path';
|
|
4
|
+
import os from 'os';
|
|
5
|
+
// Integration-style tests for rigour_run_supervised
|
|
6
|
+
// These test the exported functionality indirectly since MCP server is complex to mock
|
|
7
|
+
// Integration-style tests for rigour_run_supervised.
// These do not launch the MCP server; they simulate the event-log side effects
// the supervised-run handler produces and assert on the persisted JSONL.
describe('rigour_run_supervised', () => {
  let testDir;

  beforeEach(async () => {
    // Unique temp workspace per test; timestamp suffix avoids collisions.
    testDir = path.join(os.tmpdir(), `rigour-test-${Date.now()}`);
    await fs.ensureDir(testDir);
    // Create a minimal rigour.yml
    await fs.writeFile(path.join(testDir, 'rigour.yml'), `
version: 1
preset: api
gates:
  max_file_lines: 500
  forbid_todos: true
  required_files: []
ignore: []
`);
    // Create .rigour directory for events
    await fs.ensureDir(path.join(testDir, '.rigour'));
  });

  afterEach(async () => {
    await fs.remove(testDir);
  });

  it('should have correct tool schema', () => {
    // Verify the tool schema includes all required fields
    const expectedProperties = ['cwd', 'command', 'maxRetries', 'dryRun'];
    const requiredProperties = ['cwd', 'command'];
    // This is a schema validation test - in real MCP, the server validates this
    expect(expectedProperties).toContain('dryRun');
    expect(requiredProperties).not.toContain('dryRun'); // dryRun should be optional
  });

  it('should log supervisor_started event', async () => {
    // Simulate what the handler does
    const eventsPath = path.join(testDir, '.rigour', 'events.jsonl');
    const event = {
      id: 'test-id',
      timestamp: new Date().toISOString(),
      type: 'supervisor_started',
      requestId: 'req-123',
      command: 'echo "test"',
      maxRetries: 3,
      dryRun: true
    };
    await fs.appendFile(eventsPath, JSON.stringify(event) + '\n');
    // Round-trip through the file to prove the event is parseable JSONL.
    const content = await fs.readFile(eventsPath, 'utf-8');
    const logged = JSON.parse(content.trim());
    expect(logged.type).toBe('supervisor_started');
    expect(logged.dryRun).toBe(true);
    expect(logged.maxRetries).toBe(3);
  });

  it('should log supervisor_iteration events', async () => {
    const eventsPath = path.join(testDir, '.rigour', 'events.jsonl');
    // Simulate iteration logging
    const iterations = [
      { iteration: 1, status: 'FAIL', failures: 2 },
      { iteration: 2, status: 'FAIL', failures: 1 },
      { iteration: 3, status: 'PASS', failures: 0 },
    ];
    for (const iter of iterations) {
      const event = {
        id: `iter-${iter.iteration}`,
        timestamp: new Date().toISOString(),
        type: 'supervisor_iteration',
        requestId: 'req-123',
        ...iter
      };
      await fs.appendFile(eventsPath, JSON.stringify(event) + '\n');
    }
    const content = await fs.readFile(eventsPath, 'utf-8');
    const lines = content.trim().split('\n').map(l => JSON.parse(l));
    expect(lines.length).toBe(3);
    expect(lines[0].iteration).toBe(1);
    expect(lines[2].status).toBe('PASS');
  });

  it('should log supervisor_completed event with final status', async () => {
    const eventsPath = path.join(testDir, '.rigour', 'events.jsonl');
    const event = {
      id: 'completed-1',
      timestamp: new Date().toISOString(),
      type: 'supervisor_completed',
      requestId: 'req-123',
      finalStatus: 'PASS',
      totalIterations: 2
    };
    await fs.appendFile(eventsPath, JSON.stringify(event) + '\n');
    const content = await fs.readFile(eventsPath, 'utf-8');
    const logged = JSON.parse(content.trim());
    expect(logged.type).toBe('supervisor_completed');
    expect(logged.finalStatus).toBe('PASS');
    expect(logged.totalIterations).toBe(2);
  });

  it('should track iteration history correctly', () => {
    const iterations = [];
    // Simulate the supervisor loop
    iterations.push({ iteration: 1, status: 'FAIL', failures: 3 });
    iterations.push({ iteration: 2, status: 'FAIL', failures: 1 });
    iterations.push({ iteration: 3, status: 'PASS', failures: 0 });
    const summary = iterations.map(i => ` ${i.iteration}. ${i.status} (${i.failures} failures)`).join('\n');
    expect(summary).toContain('1. FAIL (3 failures)');
    expect(summary).toContain('3. PASS (0 failures)');
    expect(iterations.length).toBe(3);
  });

  it('should generate fix packet for failures', () => {
    // Mirrors the handler's fix-packet formatting: one task section per gate
    // failure, separated by '---'. hint is optional.
    const failures = [
      { id: 'max_lines', title: 'File too long', details: 'src/index.ts has 600 lines', files: ['src/index.ts'], hint: 'Split into modules' },
      { id: 'forbid_todos', title: 'TODO found', details: 'Found TODO comment', files: ['src/utils.ts'] },
    ];
    const fixPacket = failures.map((f, i) => {
      let text = `FIX TASK ${i + 1}: [${f.id.toUpperCase()}] ${f.title}\n`;
      text += ` - CONTEXT: ${f.details}\n`;
      if (f.files && f.files.length > 0) {
        text += ` - TARGET FILES: ${f.files.join(', ')}\n`;
      }
      if (f.hint) {
        text += ` - REFACTORING GUIDANCE: ${f.hint}\n`;
      }
      return text;
    }).join('\n---\n');
    expect(fixPacket).toContain('[MAX_LINES]');
    expect(fixPacket).toContain('[FORBID_TODOS]');
    expect(fixPacket).toContain('Split into modules');
    expect(fixPacket).toContain('src/index.ts');
  });
});
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@rigour-labs/mcp",
|
|
3
|
-
"version": "2.
|
|
3
|
+
"version": "2.18.1",
|
|
4
4
|
"type": "module",
|
|
5
5
|
"mcpName": "io.github.rigour-labs/rigour",
|
|
6
6
|
"description": "Quality gates for AI-generated code. Forces AI agents to meet strict engineering standards with PASS/FAIL enforcement.",
|
|
@@ -20,10 +20,11 @@
|
|
|
20
20
|
"execa": "^8.0.1",
|
|
21
21
|
"fs-extra": "^11.2.0",
|
|
22
22
|
"yaml": "^2.8.2",
|
|
23
|
-
"@rigour-labs/core": "2.
|
|
23
|
+
"@rigour-labs/core": "2.18.1"
|
|
24
24
|
},
|
|
25
25
|
"devDependencies": {
|
|
26
|
-
"@types/node": "^25.0.3"
|
|
26
|
+
"@types/node": "^25.0.3",
|
|
27
|
+
"vitest": "^1.0.0"
|
|
27
28
|
},
|
|
28
29
|
"scripts": {
|
|
29
30
|
"build": "tsc",
|