@infinitedusky/indusk-mcp 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/bin/cli.d.ts +2 -0
- package/dist/bin/cli.js +34 -0
- package/dist/bin/commands/init.d.ts +1 -0
- package/dist/bin/commands/init.js +213 -0
- package/dist/bin/commands/update.d.ts +1 -0
- package/dist/bin/commands/update.js +40 -0
- package/dist/lib/context-parser.d.ts +26 -0
- package/dist/lib/context-parser.js +86 -0
- package/dist/lib/impl-parser.d.ts +31 -0
- package/dist/lib/impl-parser.js +107 -0
- package/dist/lib/plan-parser.d.ts +16 -0
- package/dist/lib/plan-parser.js +82 -0
- package/dist/server/index.d.ts +1 -0
- package/dist/server/index.js +22 -0
- package/dist/tools/context-tools.d.ts +2 -0
- package/dist/tools/context-tools.js +61 -0
- package/dist/tools/document-tools.d.ts +2 -0
- package/dist/tools/document-tools.js +77 -0
- package/dist/tools/plan-tools.d.ts +2 -0
- package/dist/tools/plan-tools.js +158 -0
- package/dist/tools/quality-tools.d.ts +2 -0
- package/dist/tools/quality-tools.js +130 -0
- package/dist/tools/system-tools.d.ts +2 -0
- package/dist/tools/system-tools.js +147 -0
- package/package.json +38 -0
- package/skills/context.md +154 -0
- package/skills/document.md +134 -0
- package/skills/plan.md +278 -0
- package/skills/retrospective.md +126 -0
- package/skills/verify.md +138 -0
- package/skills/work.md +104 -0
- package/templates/CLAUDE.md +25 -0
- package/templates/biome.template.json +36 -0
- package/templates/vscode-settings.json +23 -0
|
@@ -0,0 +1,61 @@
|
|
|
1
|
+
import { join } from "node:path";
|
|
2
|
+
import { z } from "zod";
|
|
3
|
+
import { parseContext, SECTION_NAMES, updateSection, validateContext, } from "../lib/context-parser.js";
|
|
4
|
+
/**
 * Register the CLAUDE.md context tools on the MCP server.
 *
 * Tools:
 *  - get_context:    parse CLAUDE.md into sections and report validation status.
 *  - update_context: replace one named section, guarded by before/after validation.
 *
 * @param server      MCP server exposing registerTool(name, config, handler).
 * @param projectRoot Absolute path of the project containing CLAUDE.md.
 */
export function registerContextTools(server, projectRoot) {
    const claudeMdPath = join(projectRoot, "CLAUDE.md");
    // Shared response shape: one text content item carrying pretty-printed JSON.
    const textResult = (payload) => ({
        content: [{ type: "text", text: JSON.stringify(payload, null, 2) }],
    });
    server.registerTool("get_context", {
        description: "Returns CLAUDE.md parsed into its 6 canonical sections with validation status",
    }, async () => {
        const sections = parseContext(claudeMdPath);
        return textResult({ ...sections, validation: validateContext(sections) });
    });
    server.registerTool("update_context", {
        description: "Update a specific section of CLAUDE.md. Validates that the section exists and structure is preserved.",
        inputSchema: {
            section: z.enum(SECTION_NAMES).describe("The section to update"),
            content: z.string().describe("New content for the section (replaces existing content)"),
        },
    }, async ({ section, content }) => {
        // Refuse to write into a file whose structure is already broken.
        const before = validateContext(parseContext(claudeMdPath));
        if (!before.valid) {
            return textResult({
                success: false,
                error: "CLAUDE.md structure is invalid — fix manually before updating via tool",
                validation: before,
            });
        }
        updateSection(claudeMdPath, section, content);
        // Re-validate so the caller learns immediately if the write broke the structure.
        const after = validateContext(parseContext(claudeMdPath));
        return textResult({ success: true, section, structureValid: after.valid });
    });
}
|
|
@@ -0,0 +1,77 @@
|
|
|
1
|
+
import { existsSync, readdirSync } from "node:fs";
|
|
2
|
+
import { join, relative } from "node:path";
|
|
3
|
+
import { parseAllPlans } from "../lib/plan-parser.js";
|
|
4
|
+
/**
 * Recursively collect markdown file paths under `dir`, expressed relative to `base`.
 * Dot-directories (e.g. `.vitepress`) are skipped; a missing `dir` yields [].
 *
 * @param {string} dir  Directory to scan.
 * @param {string} base Root that returned paths are made relative to.
 * @returns {string[]} Relative paths of every `.md` file found.
 */
function findMarkdownFiles(dir, base) {
    if (!existsSync(dir))
        return [];
    return readdirSync(dir, { withFileTypes: true }).flatMap((entry) => {
        const fullPath = join(dir, entry.name);
        if (entry.isDirectory() && !entry.name.startsWith(".")) {
            return findMarkdownFiles(fullPath, base);
        }
        return entry.name.endsWith(".md") ? [relative(base, fullPath)] : [];
    });
}
|
|
20
|
+
/**
 * Register documentation tools on the MCP server.
 *
 * Tools:
 *  - list_docs:           enumerate markdown files under apps/indusk-docs/src/.
 *  - check_docs_coverage: flag completed plans without a decision page in the docs site.
 *
 * @param server      MCP server exposing registerTool(name, config, handler).
 * @param projectRoot Absolute path of the project root.
 */
export function registerDocumentTools(server, projectRoot) {
    const docsDir = join(projectRoot, "apps/indusk-docs/src");
    server.registerTool("list_docs", {
        description: "List all markdown files in the VitePress docs directory (apps/indusk-docs/src/)",
    }, async () => {
        const files = findMarkdownFiles(docsDir, docsDir);
        const payload = JSON.stringify({ docsDir: "apps/indusk-docs/src", files }, null, 2);
        return { content: [{ type: "text", text: payload }] };
    });
    server.registerTool("check_docs_coverage", {
        description: "Compare completed plans to existing decision/lesson pages in the docs site. Flags plans that lack corresponding documentation.",
    }, async () => {
        // Markdown pages in a docs subdirectory, excluding the section index.
        const listPages = (dir) => existsSync(dir)
            ? readdirSync(dir).filter((f) => f.endsWith(".md") && f !== "index.md")
            : [];
        const decisionFiles = listPages(join(docsDir, "decisions"));
        const lessonFiles = listPages(join(docsDir, "lessons"));
        // A plan counts as completed when its stage status says so, or when a
        // retrospective document already exists for it.
        const completedPlans = parseAllPlans(projectRoot).filter((p) => p.stageStatus === "completed" || p.documents.includes("retrospective.md"));
        const coverage = completedPlans.map((plan) => {
            const hasDecision = decisionFiles.some((f) => f.includes(plan.name));
            const hasLesson = lessonFiles.some((f) => f.includes(plan.name));
            // NOTE(review): only a missing decision page counts as a gap; a missing
            // lesson page is reported but not counted — confirm that is intended.
            return {
                plan: plan.name,
                stage: plan.stage,
                stageStatus: plan.stageStatus,
                hasDecisionPage: hasDecision,
                hasLessonPage: hasLesson,
                gap: !hasDecision,
            };
        });
        const gapCount = coverage.filter((c) => c.gap).length;
        return {
            content: [
                {
                    type: "text",
                    text: JSON.stringify({
                        completedPlans: completedPlans.length,
                        documented: coverage.length - gapCount,
                        gaps: gapCount,
                        coverage,
                    }, null, 2),
                },
            ],
        };
    });
}
|
|
@@ -0,0 +1,158 @@
|
|
|
1
|
+
import { join } from "node:path";
|
|
2
|
+
import { z } from "zod";
|
|
3
|
+
import { getAllPhaseCompletions, parseImpl } from "../lib/impl-parser.js";
|
|
4
|
+
import { parseAllPlans, parsePlan } from "../lib/plan-parser.js";
|
|
5
|
+
/**
 * Register plan-lifecycle tools on the MCP server.
 *
 * Tools:
 *  - list_plans:      dump every parsed plan from planning/.
 *  - get_plan_status: one plan plus impl.md phase completion detail.
 *  - advance_plan:    stage-gate check — may this plan move to its next stage?
 *  - order_plans:     dependency-respecting execution order (Kahn's algorithm).
 *
 * @param server      MCP server exposing registerTool(name, config, handler).
 * @param projectRoot Absolute path of the project root containing planning/.
 */
export function registerPlanTools(server, projectRoot) {
    server.registerTool("list_plans", {
        description: "List all plans in the planning/ directory with their stage, status, next step, and dependencies",
    }, async () => {
        const plans = parseAllPlans(projectRoot);
        return {
            content: [{ type: "text", text: JSON.stringify(plans, null, 2) }],
        };
    });
    server.registerTool("get_plan_status", {
        description: "Get detailed status of a specific plan including phase progress and blocked items",
        inputSchema: { name: z.string().describe("Plan directory name (e.g. 'mcp-dev-system')") },
    }, async ({ name }) => {
        const planDir = join(projectRoot, "planning", name);
        const plan = parsePlan(planDir);
        const implPath = join(planDir, "impl.md");
        const impl = parseImpl(implPath);
        // Only compute phase completions when impl.md actually defines phases.
        const completions = impl.phases.length > 0 ? getAllPhaseCompletions(impl) : [];
        const result = {
            ...plan,
            implStatus: impl.status,
            phases: completions,
        };
        return {
            content: [{ type: "text", text: JSON.stringify(result, null, 2) }],
        };
    });
    server.registerTool("advance_plan", {
        description: "Validate whether a plan can advance to the next stage. Returns what is missing if blocked.",
        inputSchema: { name: z.string().describe("Plan directory name") },
    }, async ({ name }) => {
        const planDir = join(projectRoot, "planning", name);
        const plan = parsePlan(planDir);
        const implPath = join(planDir, "impl.md");
        const impl = parseImpl(implPath);
        // All branches below return through this single JSON-text response shape.
        const respond = (result) => ({
            content: [{ type: "text", text: JSON.stringify(result, null, 2) }],
        });
        // Brief → ADR: brief status must be "accepted"
        if (plan.stage === "brief") {
            if (plan.stageStatus === "accepted") {
                return respond({ allowed: true, transition: "brief → adr", nextStage: "Create adr" });
            }
            return respond({
                allowed: false,
                transition: "brief → adr",
                missing: [`Brief status is '${plan.stageStatus}', must be 'accepted'`],
            });
        }
        // ADR → Impl: ADR status must be "accepted"
        if (plan.stage === "adr") {
            if (plan.stageStatus === "accepted") {
                return respond({ allowed: true, transition: "adr → impl", nextStage: "Create impl" });
            }
            return respond({
                allowed: false,
                transition: "adr → impl",
                missing: [`ADR status is '${plan.stageStatus}', must be 'accepted'`],
            });
        }
        // Impl phases and impl → retrospective
        if (plan.stage === "impl" && impl.phases.length > 0) {
            const completions = getAllPhaseCompletions(impl);
            // First incomplete phase (if any) is the gate blocking advancement.
            const currentPhase = completions.find((c) => !c.complete);
            if (currentPhase) {
                // Flatten every unchecked item, tagged with its gate name.
                const missing = [];
                for (const [gate, items] of Object.entries(currentPhase.uncheckedByGate)) {
                    for (const item of items) {
                        missing.push(`[${gate}] ${item}`);
                    }
                }
                return respond({
                    allowed: false,
                    transition: `phase ${currentPhase.phase} → phase ${currentPhase.phase + 1}`,
                    currentPhase: currentPhase.phase,
                    phaseName: currentPhase.name,
                    missing,
                });
            }
            // All phases complete — check impl status for retrospective
            if (impl.status !== "completed") {
                return respond({
                    allowed: false,
                    transition: "impl → retrospective",
                    missing: ["Impl status is not 'completed' — update frontmatter status"],
                });
            }
            return respond({
                allowed: true,
                transition: "impl → retrospective",
                nextStage: "Create retrospective",
            });
        }
        // Research or other stages
        if (plan.stageStatus === "accepted" || plan.stageStatus === "completed") {
            return respond({ allowed: true, nextStage: plan.nextStep });
        }
        return respond({
            allowed: false,
            missing: [`${plan.stage} status is '${plan.stageStatus}', needs 'accepted' or 'completed'`],
        });
    });
    server.registerTool("order_plans", {
        description: "Get plan execution order based on dependencies (topological sort)",
    }, async () => {
        const plans = parseAllPlans(projectRoot);
        // Topological sort via Kahn's algorithm
        const inDegree = new Map();
        const adj = new Map();
        for (const plan of plans) {
            inDegree.set(plan.name, 0);
            adj.set(plan.name, []);
        }
        // Edges run dependency → dependent; dependencies on unknown plan names
        // (not present in planning/) are ignored rather than counted.
        for (const plan of plans) {
            for (const dep of plan.dependencies) {
                if (adj.has(dep)) {
                    adj.get(dep)?.push(plan.name);
                    inDegree.set(plan.name, (inDegree.get(plan.name) ?? 0) + 1);
                }
            }
        }
        // Seed the queue with every plan that has no (known) dependencies.
        const queue = [];
        for (const [name, degree] of inDegree) {
            if (degree === 0)
                queue.push(name);
        }
        // NOTE(review): plans caught in a dependency cycle never reach in-degree 0
        // and are silently omitted from the result — confirm that is acceptable.
        const ordered = [];
        while (queue.length > 0) {
            const current = queue.shift();
            if (!current)
                break;
            ordered.push(current);
            for (const neighbor of adj.get(current) ?? []) {
                const newDegree = (inDegree.get(neighbor) ?? 1) - 1;
                inDegree.set(neighbor, newDegree);
                if (newDegree === 0)
                    queue.push(neighbor);
            }
        }
        // Re-attach plan metadata to the ordered names for the response.
        const plansByName = new Map(plans.map((p) => [p.name, p]));
        const result = ordered.map((name) => {
            const plan = plansByName.get(name);
            return {
                name,
                stage: plan?.stage,
                stageStatus: plan?.stageStatus,
                dependencies: plan?.dependencies ?? [],
            };
        });
        return {
            content: [{ type: "text", text: JSON.stringify(result, null, 2) }],
        };
    });
}
|
|
@@ -0,0 +1,130 @@
|
|
|
1
|
+
import { execSync } from "node:child_process";
|
|
2
|
+
import { existsSync, readFileSync } from "node:fs";
|
|
3
|
+
import { join } from "node:path";
|
|
4
|
+
import { z } from "zod";
|
|
5
|
+
/**
 * Parse the human-readable output of `biome check` into structured diagnostics.
 *
 * Biome prints diagnostic headers shaped like:
 *   file.ts:line:col lint/category/ruleName LEVEL ━━━
 * The message body on following lines is not captured here (message stays "").
 *
 * @param {string} output Raw stdout/stderr text from a biome run.
 * @returns {Array<{file: string, line: number, column: number, rule: string,
 *                  message: string, severity: string}>}
 */
function parseBiomeOutput(output) {
    // Every header becomes a diagnostic, defaulting to "error" severity.
    const diagnostics = [...output.matchAll(/^(.+?):(\d+):(\d+)\s+([\w/]+)\s+(?:FIXABLE\s+)?/gm)]
        .map(([, file, line, column, rule]) => ({
            file,
            line: Number.parseInt(line, 10),
            column: Number.parseInt(column, 10),
            rule,
            message: "",
            severity: "error",
        }));
    // Headers tagged FIXABLE downgrade the matching diagnostic to "info".
    for (const [, file, line, , rule] of output.matchAll(/^(.+?):(\d+):(\d+)\s+([\w/]+)\s+FIXABLE/gm)) {
        const lineNumber = Number.parseInt(line, 10);
        const hit = diagnostics.find((d) => d.file === file && d.line === lineNumber && d.rule === rule);
        if (hit) {
            hit.severity = "info";
        }
    }
    return diagnostics;
}
|
|
36
|
+
/**
 * Register code-quality tools backed by Biome on the MCP server.
 *
 * Tools:
 *  - get_quality_config: read biome.json and biome-rationale.md as structured data.
 *  - suggest_rule:       keyword-match a mistake description against `biome rage` output.
 *  - quality_check:      run `biome check` and return parsed diagnostics.
 *
 * @param server      MCP server exposing registerTool(name, config, handler).
 * @param projectRoot Working directory for biome invocations and config lookup.
 */
export function registerQualityTools(server, projectRoot) {
    server.registerTool("get_quality_config", {
        description: "Read biome.json and biome-rationale.md, returning the quality config as structured data",
    }, async () => {
        const biomePath = join(projectRoot, "biome.json");
        const rationalePath = join(projectRoot, "biome-rationale.md");
        // Either file may be absent; missing files yield null rather than an error.
        const biomeConfig = existsSync(biomePath)
            ? JSON.parse(readFileSync(biomePath, "utf-8"))
            : null;
        const rationale = existsSync(rationalePath) ? readFileSync(rationalePath, "utf-8") : null;
        return {
            content: [
                {
                    type: "text",
                    text: JSON.stringify({ biomeConfig, rationale }, null, 2),
                },
            ],
        };
    });
    server.registerTool("suggest_rule", {
        description: "Given a mistake description, suggest Biome rules that could prevent it. Searches the installed Biome rule catalog.",
        inputSchema: {
            description: z.string().describe("Description of the mistake or pattern to prevent"),
        },
    }, async ({ description }) => {
        // Get the list of rules from biome rage
        let rulesOutput;
        try {
            rulesOutput = execSync("npx biome rage --linter", {
                cwd: projectRoot,
                encoding: "utf-8",
                timeout: 15000,
            });
        }
        catch {
            // Best-effort: if biome is missing or the command fails, search an
            // empty catalog and fall through to the "no matching rules" reply.
            rulesOutput = "";
        }
        // Search for keywords from the description in the rules list.
        // Words of <= 3 characters are dropped as too noisy to match on.
        const keywords = description
            .toLowerCase()
            .split(/\s+/)
            .filter((w) => w.length > 3);
        const lines = rulesOutput.split("\n");
        // A catalog line matches if it contains any keyword (case-insensitive).
        const matches = lines.filter((line) => {
            const lower = line.toLowerCase();
            return keywords.some((kw) => lower.includes(kw));
        });
        return {
            content: [
                {
                    type: "text",
                    text: JSON.stringify({
                        query: description,
                        keywords,
                        // Cap suggestions at 20 lines to keep the payload small.
                        suggestions: matches.length > 0
                            ? matches.slice(0, 20)
                            : ["No matching rules found — consider a custom approach or check biome docs"],
                    }, null, 2),
                },
            ],
        };
    });
    server.registerTool("quality_check", {
        description: "Run `biome check` and return structured results with file, line, rule, and severity",
    }, async () => {
        let output;
        let exitCode;
        try {
            output = execSync("npx biome check", {
                cwd: projectRoot,
                encoding: "utf-8",
                timeout: 30000,
            });
            exitCode = 0;
        }
        catch (err) {
            // execSync throws on a non-zero exit; the diagnostic text is still
            // available on the error's stdout/stderr buffers.
            const execErr = err;
            output = (execErr.stdout ?? "") + (execErr.stderr ?? "");
            // status may be undefined if the process was killed (e.g. timeout).
            exitCode = execErr.status ?? 1;
        }
        const diagnostics = parseBiomeOutput(output);
        const passed = exitCode === 0;
        // Extract the summary line
        const summaryMatch = output.match(/Checked \d+ files?.*/);
        const summary = summaryMatch ? summaryMatch[0] : "";
        return {
            content: [
                {
                    type: "text",
                    text: JSON.stringify({ passed, exitCode, summary, diagnostics }, null, 2),
                },
            ],
        };
    });
}
|
|
@@ -0,0 +1,147 @@
|
|
|
1
|
+
import { execSync } from "node:child_process";
|
|
2
|
+
import { createHash } from "node:crypto";
|
|
3
|
+
import { existsSync, readdirSync, readFileSync } from "node:fs";
|
|
4
|
+
import { createConnection } from "node:net";
|
|
5
|
+
import { dirname, join } from "node:path";
|
|
6
|
+
import { fileURLToPath } from "node:url";
|
|
7
|
+
// Resolve the installed package root (two levels up from this compiled module)
// so package.json and skills/ can be found relative to the package itself,
// not the caller's working directory.
const __dirname = dirname(fileURLToPath(import.meta.url));
const packageRoot = join(__dirname, "../..");
|
|
9
|
+
/**
 * Short content fingerprint of a file: first 12 hex chars of its SHA-256 digest.
 *
 * @param {string} path File to hash; throws if unreadable.
 * @returns {string} 12-character lowercase hex prefix.
 */
function fileHash(path) {
    const digest = createHash("sha256").update(readFileSync(path)).digest("hex");
    return digest.slice(0, 12);
}
|
|
12
|
+
/**
 * Register system-level tools on the MCP server.
 *
 * Tools:
 *  - get_system_version: name/version of the installed indusk-mcp package.
 *  - check_health:       FalkorDB TCP reachability, CGC binary presence, and
 *                        FalkorDB Docker container status; isError when degraded.
 *  - get_skill_versions: hash-compare package skills against .claude/skills/.
 *
 * @param server      MCP server exposing registerTool(name, config, handler).
 * @param projectRoot Absolute path of the consuming project.
 */
export function registerSystemTools(server, projectRoot) {
    server.registerTool("get_system_version", {
        description: "Return the installed indusk-mcp package version",
    }, async () => {
        const { name, version } = JSON.parse(readFileSync(join(packageRoot, "package.json"), "utf-8"));
        return {
            content: [
                { type: "text", text: JSON.stringify({ name, version }, null, 2) },
            ],
        };
    });
    server.registerTool("check_health", {
        description: "Check health of dev system dependencies: FalkorDB connectivity, CGC installation, and Docker container status. Errors indicate the system is degraded.",
    }, async () => {
        const checks = [];
        // --- FalkorDB: plain TCP connect with a 3s timeout ---
        const host = process.env.FALKORDB_HOST ?? "localhost";
        const port = Number.parseInt(process.env.FALKORDB_PORT ?? "6379", 10);
        const reachable = await new Promise((resolve) => {
            const socket = createConnection({ host, port }, () => {
                socket.destroy();
                resolve(true);
            });
            socket.setTimeout(3000);
            socket.on("timeout", () => {
                socket.destroy();
                resolve(false);
            });
            socket.on("error", () => {
                resolve(false);
            });
        });
        checks.push({
            name: "falkordb",
            status: reachable ? "ok" : "error",
            detail: reachable
                ? `Connected to ${host}:${port}`
                : `Cannot connect to FalkorDB at ${host}:${port} — run: docker start falkordb`,
        });
        // --- CGC binary: version prints to stderr, so just probe known install paths ---
        const cgcPath = [join(process.env.HOME ?? "", ".local/bin/cgc"), "/usr/local/bin/cgc"]
            .find((p) => existsSync(p));
        checks.push({
            name: "codegraphcontext",
            status: cgcPath ? "ok" : "error",
            detail: cgcPath
                ? `CGC found at ${cgcPath}`
                : "CGC not found — install via: pipx install codegraphcontext",
        });
        // --- FalkorDB Docker container: non-empty `docker ps` status means running ---
        try {
            const containerStatus = execSync('docker ps --filter name=falkordb --format "{{.Status}}"', {
                encoding: "utf-8",
                timeout: 5000,
                stdio: ["ignore", "pipe", "pipe"],
            }).trim();
            const running = containerStatus.length > 0;
            checks.push({
                name: "falkordb-container",
                status: running ? "ok" : "error",
                detail: running
                    ? `Container status: ${containerStatus}`
                    : "FalkorDB container not running — run: docker start falkordb",
            });
        }
        catch {
            checks.push({
                name: "falkordb-container",
                status: "error",
                detail: "Docker not available or falkordb container not found",
            });
        }
        const healthy = checks.every((c) => c.status === "ok");
        return {
            content: [
                { type: "text", text: JSON.stringify({ healthy, checks }, null, 2) },
            ],
            isError: !healthy,
        };
    });
    server.registerTool("get_skill_versions", {
        description: "Compare installed skills in .claude/skills/ to package skills. Returns status per skill: installed, current, or outdated.",
    }, async () => {
        const skillsSource = join(packageRoot, "skills");
        const skillsTarget = join(projectRoot, ".claude/skills");
        if (!existsSync(skillsSource)) {
            return {
                content: [
                    {
                        type: "text",
                        text: JSON.stringify({ error: "Package skills directory not found" }, null, 2),
                    },
                ],
            };
        }
        // Each package skill skills/<name>.md maps to .claude/skills/<name>/SKILL.md;
        // identical hashes mean the installed copy is current.
        const skills = readdirSync(skillsSource)
            .filter((f) => f.endsWith(".md"))
            .map((file) => {
                const name = file.replace(".md", "");
                const packageHash = fileHash(join(skillsSource, file));
                const targetFile = join(skillsTarget, name, "SKILL.md");
                const installed = existsSync(targetFile);
                const installedHash = installed ? fileHash(targetFile) : null;
                return {
                    name,
                    installed,
                    current: installed && packageHash === installedHash,
                    packageHash,
                    installedHash,
                };
            });
        const summary = {
            total: skills.length,
            installed: skills.filter((s) => s.installed).length,
            current: skills.filter((s) => s.current).length,
            outdated: skills.filter((s) => s.installed && !s.current).length,
            missing: skills.filter((s) => !s.installed).length,
        };
        return {
            content: [
                { type: "text", text: JSON.stringify({ summary, skills }, null, 2) },
            ],
        };
    });
}
|
package/package.json
ADDED
|
@@ -0,0 +1,38 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@infinitedusky/indusk-mcp",
|
|
3
|
+
"version": "0.1.0",
|
|
4
|
+
"description": "InDusk development system — skills, MCP tools, and CLI for structured AI-assisted development",
|
|
5
|
+
"type": "module",
|
|
6
|
+
"files": [
|
|
7
|
+
"dist",
|
|
8
|
+
"skills",
|
|
9
|
+
"templates",
|
|
10
|
+
"!dist/**/*.test.*",
|
|
11
|
+
"!dist/__tests__"
|
|
12
|
+
],
|
|
13
|
+
"bin": {
|
|
14
|
+
"indusk": "dist/bin/cli.js",
|
|
15
|
+
"dev-system": "dist/bin/cli.js"
|
|
16
|
+
},
|
|
17
|
+
"exports": {
|
|
18
|
+
".": "./dist/server/index.js"
|
|
19
|
+
},
|
|
20
|
+
"scripts": {
|
|
21
|
+
"dev": "tsx watch src/server/index.ts",
|
|
22
|
+
"build": "tsc",
|
|
23
|
+
"start": "node dist/server/index.js",
|
|
24
|
+
"test": "vitest run"
|
|
25
|
+
},
|
|
26
|
+
"dependencies": {
|
|
27
|
+
"@modelcontextprotocol/sdk": "^1.12.1",
|
|
28
|
+
"commander": "^13.0.0",
|
|
29
|
+
"glob": "^11.0.0",
|
|
30
|
+
"gray-matter": "^4.0.3",
|
|
31
|
+
"zod": "^4.3.6"
|
|
32
|
+
},
|
|
33
|
+
"devDependencies": {
|
|
34
|
+
"@types/node": "^22.0.0",
|
|
35
|
+
"tsx": "^4.19.0",
|
|
36
|
+
"typescript": "^5.7.0"
|
|
37
|
+
}
|
|
38
|
+
}
|