@10kdevs/matha 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +222 -0
- package/dist/analysis/contract-matcher.js +223 -0
- package/dist/analysis/git-analyser.js +261 -0
- package/dist/analysis/stability-classifier.js +122 -0
- package/dist/brain/cortex.js +258 -0
- package/dist/brain/dopamine.js +184 -0
- package/dist/brain/frontal-lobe.js +219 -0
- package/dist/brain/hippocampus.js +134 -0
- package/dist/commands/after.js +334 -0
- package/dist/commands/before.js +328 -0
- package/dist/commands/init.js +266 -0
- package/dist/commands/migrate.js +16 -0
- package/dist/index.js +114 -0
- package/dist/mcp/server.js +305 -0
- package/dist/mcp/tools.js +379 -0
- package/dist/storage/reader.js +29 -0
- package/dist/storage/writer.js +111 -0
- package/dist/utils/markdown-parser.js +173 -0
- package/dist/utils/schema-version.js +91 -0
- package/package.json +62 -0
|
@@ -0,0 +1,29 @@
|
|
|
1
|
+
import * as fs from 'fs/promises';
|
|
2
|
+
/**
 * Read a file and parse its contents as JSON.
 *
 * Propagates any error raised by the filesystem (e.g. ENOENT for a
 * missing file) or by `JSON.parse` on malformed content. Use
 * {@link readJsonOrNull} when a missing file is an expected,
 * non-exceptional case.
 *
 * @param {string} filePath - Path of the JSON file to read.
 * @returns {Promise<any>} The parsed JSON value.
 */
export async function readJson(filePath) {
  const raw = await fs.readFile(filePath, 'utf-8');
  return JSON.parse(raw);
}
|
|
12
|
+
/**
 * Read and parse a JSON file, returning `null` when the file is absent.
 *
 * - Returns `null` if the file (or any parent directory) does not exist.
 * - **Never throws on missing files.**
 * - Still throws on invalid JSON — that is a data-integrity issue, not a
 *   missing-file issue.
 *
 * @param {string} filePath - Path of the JSON file to read.
 * @returns {Promise<any|null>} Parsed value, or `null` when absent.
 */
export async function readJsonOrNull(filePath) {
  try {
    const raw = await fs.readFile(filePath, 'utf-8');
    return JSON.parse(raw);
  } catch (err) {
    // Only a missing file maps to null; everything else propagates.
    if (err.code === 'ENOENT') {
      return null;
    }
    throw err;
  }
}
|
|
@@ -0,0 +1,111 @@
|
|
|
1
|
+
import * as fs from 'fs/promises';
|
|
2
|
+
import * as path from 'path';
|
|
3
|
+
import * as crypto from 'crypto';
|
|
4
|
+
/**
 * Atomically writes JSON data to a file.
 *
 * Pattern: serialize → validate → write to a temp file → rename to the
 * final path. The target file is never written directly, so readers
 * either see the old content or the new content — never a partial write.
 *
 * @param {string} filePath - Absolute path to the target file.
 * @param {*} data - Data to serialize as JSON. Must be JSON-serializable.
 * @param {{overwrite?: boolean}} [options] - `overwrite: true` replaces
 *   an existing file. Default: false (throws if the file exists).
 * @throws {Error} If the data cannot be serialized, or the file exists
 *   without `overwrite`, or the filesystem write/rename fails.
 */
export async function writeAtomic(filePath, data, options) {
  // Serialize up front so we never touch the filesystem with bad data.
  let json;
  try {
    json = JSON.stringify(data, null, 2);
  } catch (err) {
    throw new Error(`Data is not JSON-serializable: ${err.message}`);
  }
  // JSON.stringify returns undefined (without throwing) for values such
  // as `undefined` or a bare function — reject those explicitly.
  if (typeof json !== 'string') {
    throw new Error('Data cannot be serialized to JSON');
  }

  // Refuse to clobber an existing file unless explicitly allowed.
  // NOTE(review): access-then-write is a TOCTOU window; assumed acceptable
  // for this single-process CLI usage — confirm if concurrent writers exist.
  if (!options?.overwrite) {
    const exists = await fs.access(filePath).then(() => true, () => false);
    if (exists) {
      throw new Error(`File already exists and overwrite is not enabled: ${filePath}`);
    }
  }

  await fs.mkdir(path.dirname(filePath), { recursive: true });

  // Unique temp name: pid + random hex avoids collisions between writers.
  const tmpPath = `${filePath}.${process.pid}.${crypto.randomBytes(4).toString('hex')}.tmp`;
  try {
    await fs.writeFile(tmpPath, json, 'utf-8');
    await fs.rename(tmpPath, filePath);
  } catch (err) {
    // Best-effort cleanup of the temp file; the original error wins.
    await fs.unlink(tmpPath).catch(() => { /* ignore cleanup errors */ });
    throw err;
  }
}
|
|
59
|
+
/**
 * Appends an item to a JSON array file.
 *
 * - Creates the file with `[item]` if it does not yet exist.
 * - Throws if the existing file does not contain a JSON array.
 * - Uses the atomic-write pattern internally (via {@link writeAtomic}).
 *
 * @param {string} filePath - Path of the JSON array file.
 * @param {*} item - Item to append.
 */
export async function appendToArray(filePath, item) {
  let items;
  try {
    const parsed = JSON.parse(await fs.readFile(filePath, 'utf-8'));
    if (!Array.isArray(parsed)) {
      throw new Error(`Cannot append to non-array. File contains ${typeof parsed}`);
    }
    items = parsed;
  } catch (err) {
    // A missing file is fine — start a fresh array. Anything else
    // (including the non-array error above and JSON syntax errors,
    // which carry no `code`) propagates.
    if (err.code !== 'ENOENT') {
      throw err;
    }
    items = [];
  }
  items.push(item);
  await writeAtomic(filePath, items, { overwrite: true });
}
|
|
84
|
+
/**
 * Shallow-merges a partial object into a JSON object file.
 *
 * - Creates the file with `partial` if it does not yet exist.
 * - Throws if the existing file is not a plain JSON object (rejects
 *   arrays and primitives).
 * - Keys in `partial` win over existing keys; the merge is shallow.
 * - Uses the atomic-write pattern internally (via {@link writeAtomic}).
 *
 * @param {string} filePath - Path of the JSON object file.
 * @param {Object} partial - Keys to merge into the stored object.
 */
export async function mergeObject(filePath, partial) {
  let base;
  try {
    const parsed = JSON.parse(await fs.readFile(filePath, 'utf-8'));
    const isPlainObject =
      typeof parsed === 'object' && parsed !== null && !Array.isArray(parsed);
    if (!isPlainObject) {
      throw new Error(`Cannot merge into non-object. File contains ${Array.isArray(parsed) ? 'array' : typeof parsed}`);
    }
    base = parsed;
  } catch (err) {
    // Missing file → start from an empty object; all other errors
    // (including the non-object error above) propagate.
    if (err.code !== 'ENOENT') {
      throw err;
    }
    base = {};
  }
  await writeAtomic(filePath, { ...base, ...partial }, { overwrite: true });
}
|
|
@@ -0,0 +1,173 @@
|
|
|
1
|
+
import * as fs from 'fs/promises';
|
|
2
|
+
import * as path from 'path';
|
|
3
|
+
// ──────────────────────────────────────────────────────────────
// HEADING KEYWORD GROUPS (case-insensitive substring match)
// ──────────────────────────────────────────────────────────────
// Each list is matched against section headings by findSection via a
// lowercase substring test — order matters only in that the FIRST
// section whose heading contains ANY keyword wins.

// Headings that describe why the project exists.
const WHY_KEYWORDS = ['overview', 'purpose', 'why', 'problem', 'about'];
// Headings that list business rules / hard constraints.
const RULES_KEYWORDS = [
  'business rules',
  'non-negotiable',
  'constraints',
  'requirements',
  'rules',
];
// Headings that describe what is explicitly out of scope.
const BOUNDARIES_KEYWORDS = [
  'out of scope',
  'not in scope',
  'exclusions',
  'not doing',
  'boundaries',
  'limitations',
];
// Headings that identify the owning person or team.
const OWNER_KEYWORDS = ['owner', 'team', 'contact', 'maintainer'];
// File extensions accepted by parseMarkdownFile (lowercased before check).
const SUPPORTED_EXTENSIONS = new Set(['.md', '.txt']);
|
|
24
|
+
/**
 * Parse a markdown or text file and extract brain seed data.
 *
 * **Throws** only on:
 * - Unsupported file extension (anything other than .md / .txt)
 * - File not found
 *
 * **Never throws** on parse failures — returns an empty/partial seed.
 *
 * @param {string} filepath - Path to a .md or .txt file.
 * @returns {Promise<{why: string|null, rules: string[], boundaries: string[], owner: string|null}>}
 */
export async function parseMarkdownFile(filepath) {
  // Reject unsupported extensions before touching the filesystem.
  if (!SUPPORTED_EXTENSIONS.has(path.extname(filepath).toLowerCase())) {
    throw new Error('Only .md and .txt files are supported');
  }

  let content;
  try {
    content = await fs.readFile(filepath, 'utf-8');
  } catch (err) {
    // Translate ENOENT into a friendlier message; anything else is unexpected.
    if (err.code === 'ENOENT') {
      throw new Error(`File not found: ${filepath}`);
    }
    throw err;
  }

  try {
    return parseContent(content);
  } catch {
    // Parsing must never take the caller down — fall back to an empty seed.
    return { why: null, rules: [], boundaries: [], owner: null };
  }
}
|
|
58
|
+
/**
 * Split markdown content into sections keyed by heading.
 *
 * Returns an array of `{ heading, lines }` where `lines` are the content
 * lines under that heading (the heading line itself is excluded). The
 * first entry always has heading `''` and holds any content that appears
 * before the first heading.
 *
 * @param {string} content - Raw markdown text.
 * @returns {Array<{heading: string, lines: string[]}>}
 */
function splitSections(content) {
  const sections = [];
  let section = { heading: '', lines: [] };
  for (const line of content.split('\n')) {
    // ATX headings: 1–6 '#' characters followed by whitespace and text.
    const match = /^#{1,6}\s+(.+)$/.exec(line);
    if (match) {
      sections.push(section);
      section = { heading: match[1].trim(), lines: [] };
    } else {
      section.lines.push(line);
    }
  }
  sections.push(section); // flush the trailing section
  return sections;
}
|
|
81
|
+
/**
 * Find the first section whose heading contains one of the given
 * keywords (case-insensitive substring match).
 *
 * @param {Array<{heading: string, lines: string[]}>} sections
 * @param {string[]} keywords - Lowercase keywords to look for.
 * @returns {{heading: string, lines: string[]}|null} Match or `null`.
 */
function findSection(sections, keywords) {
  const hit = sections.find((section) => {
    const heading = section.heading.toLowerCase();
    return keywords.some((kw) => heading.includes(kw));
  });
  return hit ?? null;
}
|
|
96
|
+
/**
 * Extract the first paragraph (a run of consecutive non-blank,
 * non-bullet lines) from an array of lines, joined with spaces.
 *
 * Leading blank lines and bullet lines are skipped; once paragraph text
 * has started, the next blank or bullet line ends it.
 *
 * @param {string[]} lines
 * @returns {string|null} The paragraph text, or `null` if none found.
 */
function firstParagraph(lines) {
  const collected = [];
  for (const raw of lines) {
    const text = raw.trim();
    const isBlank = text === '';
    const isBullet = /^[-*]\s+/.test(text);
    if (isBlank || isBullet) {
      if (collected.length > 0) {
        break; // paragraph ended
      }
      continue; // still skipping leading noise
    }
    collected.push(text);
  }
  return collected.length > 0 ? collected.join(' ') : null;
}
|
|
121
|
+
/**
 * Extract all bullet points (`- item` or `* item`) from an array of
 * lines, stripping the leading marker and surrounding whitespace.
 * Bullets whose text trims to empty are dropped.
 *
 * @param {string[]} lines
 * @returns {string[]} Bullet texts in document order.
 */
function extractBullets(lines) {
  return lines
    .map((line) => /^\s*[-*]\s+(.+)$/.exec(line))
    .filter((match) => match !== null)
    .map((match) => match[1].trim())
    .filter((text) => text !== '');
}
|
|
137
|
+
/**
 * Return the first line that is non-empty after trimming, or `null`
 * when every line is blank.
 *
 * @param {string[]} lines
 * @returns {string|null} Trimmed text of the first non-empty line.
 */
function firstNonEmptyLine(lines) {
  const found = lines.map((line) => line.trim()).find((text) => text !== '');
  return found ?? null;
}
|
|
148
|
+
/**
 * Assemble a brain seed from raw markdown content.
 *
 * @param {string} content - Raw markdown text.
 * @returns {{why: string|null, rules: string[], boundaries: string[], owner: string|null}}
 */
function parseContent(content) {
  const sections = splitSections(content);

  // WHY: prefer a dedicated section; otherwise fall back to the first
  // paragraph anywhere in the document.
  const whySection = findSection(sections, WHY_KEYWORDS);
  let why = whySection ? firstParagraph(whySection.lines) : null;
  if (!why) {
    why = firstParagraph(sections.flatMap((section) => section.lines));
  }

  const rulesSection = findSection(sections, RULES_KEYWORDS);
  const boundariesSection = findSection(sections, BOUNDARIES_KEYWORDS);
  const ownerSection = findSection(sections, OWNER_KEYWORDS);

  return {
    why,
    rules: rulesSection ? extractBullets(rulesSection.lines) : [],
    boundaries: boundariesSection ? extractBullets(boundariesSection.lines) : [],
    owner: ownerSection ? firstNonEmptyLine(ownerSection.lines) : null,
  };
}
|
|
@@ -0,0 +1,91 @@
|
|
|
1
|
+
import * as fs from 'fs/promises';
|
|
2
|
+
import * as path from 'path';
|
|
3
|
+
/**
 * Single source of truth for the current schema version.
 * Bump this when the .matha/ directory structure changes.
 *
 * Compared (as major.minor.patch) against `schema_version` in
 * .matha/config.json by checkSchemaVersion.
 */
export const CURRENT_SCHEMA_VERSION = '0.1.0';
|
|
8
|
+
/**
 * Compare two semver strings (major.minor.patch).
 *
 * Missing components are treated as 0 (so '1.0' === '1.0.0').
 * NOTE(review): non-numeric components become NaN and compare as equal
 * to anything — assumed fine for internally-generated x.y.z versions.
 *
 * @param {string} a
 * @param {string} b
 * @returns {number} -1 if a < b, 0 if a === b, 1 if a > b.
 */
function compareSemver(a, b) {
  const left = a.split('.').map(Number);
  const right = b.split('.').map(Number);
  for (let i = 0; i < 3; i++) {
    const lv = left[i] ?? 0;
    const rv = right[i] ?? 0;
    if (lv < rv) {
      return -1;
    }
    if (lv > rv) {
      return 1;
    }
  }
  return 0;
}
|
|
25
|
+
/**
 * Check the schema version recorded in `<mathaDir>/config.json`.
 *
 * **Never throws** — always returns a result, even on malformed or
 * missing config files.
 *
 * @param {string} mathaDir - Path to the .matha/ directory.
 * @returns {Promise<{status: 'uninitialised'|'legacy'|'ok'|'outdated'|'newer', version: string|null}>}
 */
export async function checkSchemaVersion(mathaDir) {
  try {
    const configPath = path.join(mathaDir, 'config.json');

    let raw;
    try {
      raw = await fs.readFile(configPath, 'utf-8');
    } catch {
      // No config file → the directory was never initialised.
      return { status: 'uninitialised', version: null };
    }

    let config;
    try {
      config = JSON.parse(raw);
    } catch {
      // Malformed JSON → treat as uninitialised (safe fallback).
      return { status: 'uninitialised', version: null };
    }
    if (config === null || typeof config !== 'object') {
      return { status: 'uninitialised', version: null };
    }

    const schemaVersion = config.schema_version;
    if (schemaVersion === undefined || schemaVersion === null) {
      // Config predates schema versioning.
      return { status: 'legacy', version: null };
    }

    const version = String(schemaVersion);
    switch (compareSemver(version, CURRENT_SCHEMA_VERSION)) {
      case 0:
        return { status: 'ok', version };
      case -1:
        return { status: 'outdated', version };
      default:
        return { status: 'newer', version };
    }
  } catch {
    // Catch-all: never throw, always return a result.
    return { status: 'uninitialised', version: null };
  }
}
|
|
70
|
+
/**
 * Return a human-readable message for the given schema check result.
 * Returns `null` when no message is needed (ok, uninitialised, or any
 * unrecognised status).
 *
 * @param {{status: string, version?: string|null}} result - Output of checkSchemaVersion.
 * @returns {string|null}
 */
export function getSchemaMessage(result) {
  switch (result.status) {
    case 'ok':
    case 'uninitialised':
      return null;
    case 'legacy':
      return ('⚠ This .matha/ was created before schema versioning. ' +
        'Run `matha migrate` to upgrade. (Coming in v0.2.0)');
    case 'outdated':
      return (`⚠ This .matha/ uses schema v${result.version}. ` +
        `Current is v${CURRENT_SCHEMA_VERSION}. ` +
        'Run `matha migrate` to upgrade. (Coming in v0.2.0)');
    case 'newer':
      // Fix: the published package is the scoped `@10kdevs/matha`
      // (see package.json "name"), not the unscoped `matha`.
      return (`✗ This .matha/ uses schema v${result.version} which is newer ` +
        `than this version of MATHA (v${CURRENT_SCHEMA_VERSION}). ` +
        'Upgrade MATHA: npm install -g @10kdevs/matha');
    default:
      // Unknown status: explicit null (previously fell through to undefined),
      // matching the documented "returns null when no message" contract.
      return null;
  }
}
|
package/package.json
ADDED
|
@@ -0,0 +1,62 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@10kdevs/matha",
|
|
3
|
+
"version": "0.1.0",
|
|
4
|
+
"description": "The persistent cognitive layer for AI-assisted development. Gives AI agents the project context that currently only exists inside a senior engineer's head.",
|
|
5
|
+
"main": "./dist/index.js",
|
|
6
|
+
"bin": {
|
|
7
|
+
"matha": "dist/index.js"
|
|
8
|
+
},
|
|
9
|
+
"files": [
|
|
10
|
+
"dist/",
|
|
11
|
+
"README.md",
|
|
12
|
+
"LICENSE"
|
|
13
|
+
],
|
|
14
|
+
"scripts": {
|
|
15
|
+
"build": "tsc && tsc-alias",
|
|
16
|
+
"test": "vitest",
|
|
17
|
+
"dev": "tsx src/index.ts",
|
|
18
|
+
"serve": "tsx src/index.ts serve",
|
|
19
|
+
"cli": "tsx src/index.ts",
|
|
20
|
+
"prepublishOnly": "npm run build && npx vitest run"
|
|
21
|
+
},
|
|
22
|
+
"type": "module",
|
|
23
|
+
"keywords": [
|
|
24
|
+
"ai",
|
|
25
|
+
"mcp",
|
|
26
|
+
"llm",
|
|
27
|
+
"context",
|
|
28
|
+
"memory",
|
|
29
|
+
"claude",
|
|
30
|
+
"cursor",
|
|
31
|
+
"copilot",
|
|
32
|
+
"developer-tools",
|
|
33
|
+
"productivity",
|
|
34
|
+
"model-context-protocol"
|
|
35
|
+
],
|
|
36
|
+
"author": "Bhupesh",
|
|
37
|
+
"license": "MIT",
|
|
38
|
+
"homepage": "https://github.com/bhupesh003/matha#readme",
|
|
39
|
+
"repository": {
|
|
40
|
+
"type": "git",
|
|
41
|
+
"url": "git+https://github.com/bhupesh003/matha.git"
|
|
42
|
+
},
|
|
43
|
+
"bugs": {
|
|
44
|
+
"url": "https://github.com/bhupesh003/matha/issues"
|
|
45
|
+
},
|
|
46
|
+
"engines": {
|
|
47
|
+
"node": ">=20.0.0"
|
|
48
|
+
},
|
|
49
|
+
"dependencies": {
|
|
50
|
+
"@modelcontextprotocol/sdk": "^1.27.1",
|
|
51
|
+
"commander": "^14.0.3",
|
|
52
|
+
"inquirer": "^13.3.0",
|
|
53
|
+
"simple-git": "^3.32.3",
|
|
54
|
+
"typescript": "^5.9.3"
|
|
55
|
+
},
|
|
56
|
+
"devDependencies": {
|
|
57
|
+
"@types/node": "^25.3.3",
|
|
58
|
+
"tsc-alias": "^1.8.16",
|
|
59
|
+
"tsx": "^4.21.0",
|
|
60
|
+
"vitest": "^4.0.18"
|
|
61
|
+
}
|
|
62
|
+
}
|