mcp-multitool 0.1.10 → 0.1.12
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +59 -37
- package/dist/index.js +3 -0
- package/dist/tools/cloneFileOrDir.d.ts +2 -0
- package/dist/tools/cloneFileOrDir.js +55 -0
- package/dist/tools/readLogFile.js +223 -100
- package/package.json +1 -2
package/README.md
CHANGED
|
@@ -12,6 +12,7 @@ A [Model Context Protocol (MCP)](https://modelcontextprotocol.io) server with **
|
|
|
12
12
|
| ----------------- | ------------------------------------------------------- |
|
|
13
13
|
| `astGrepSearch` | Search code using AST patterns |
|
|
14
14
|
| `checkFileOrDir` | Check if a file or directory exists and return metadata |
|
|
15
|
+
| `cloneFileOrDir` | Copy one or more files or directories to a destination |
|
|
15
16
|
| `deleteFileOrDir` | Delete one or more files or directories |
|
|
16
17
|
| `moveFileOrDir` | Move one or more files or directories to a new location |
|
|
17
18
|
| `readLogFile` | Read and compress logs with 60-90% token reduction |
|
|
@@ -104,6 +105,28 @@ checkFileOrDir path="./src/index.ts"
|
|
|
104
105
|
|
|
105
106
|
---
|
|
106
107
|
|
|
108
|
+
### `cloneFileOrDir`
|
|
109
|
+
|
|
110
|
+
Copy one or more files or directories to a destination directory.
|
|
111
|
+
|
|
112
|
+
| Parameter | Type | Required | Description |
|
|
113
|
+
| ----------- | -------------------- | -------- | ---------------------------------- |
|
|
114
|
+
| `from` | `string \| string[]` | ✅ | Source path(s) to clone. |
|
|
115
|
+
| `to` | `string` | ✅ | Destination directory. |
|
|
116
|
+
| `overwrite` | `boolean` | ✅ | If true, overwrite existing files. |
|
|
117
|
+
|
|
118
|
+
**Response:** JSON array of `{source, destination}` objects showing each cloned path.
|
|
119
|
+
|
|
120
|
+
**Examples:**
|
|
121
|
+
|
|
122
|
+
```
|
|
123
|
+
cloneFileOrDir from="config.json" to="backup/" overwrite=false
|
|
124
|
+
cloneFileOrDir from=["a.txt", "b.txt"] to="copies/" overwrite=false
|
|
125
|
+
cloneFileOrDir from="src/" to="archive/" overwrite=true
|
|
126
|
+
```
|
|
127
|
+
|
|
128
|
+
---
|
|
129
|
+
|
|
107
130
|
### `deleteFileOrDir`
|
|
108
131
|
|
|
109
132
|
Delete one or more files or directories.
|
|
@@ -149,47 +172,44 @@ moveFileOrDir from="config.json" to="dest/" overwrite=true
|
|
|
149
172
|
|
|
150
173
|
### `readLogFile`
|
|
151
174
|
|
|
152
|
-
Compress a log file using semantic pattern extraction
|
|
175
|
+
Compress a log file using semantic pattern extraction. Groups similar lines into templates with `<*>` wildcards for variable parts. **Stateless** — each call processes the file fresh.
|
|
153
176
|
|
|
154
|
-
**
|
|
177
|
+
**Content-hashed template IDs:** Template IDs are 12-character base64URL hashes derived from the pattern itself. The same pattern **always** gets the same ID, regardless of file order or when you call the tool. This means drill-down always works if the pattern still exists.
|
|
155
178
|
|
|
156
|
-
| Parameter | Type | Required | Description
|
|
157
|
-
| -------------- | --------- | -------- |
|
|
158
|
-
| `path` | `string` | ✅ | Path to the log file.
|
|
159
|
-
| `
|
|
160
|
-
| `
|
|
161
|
-
| `
|
|
162
|
-
| `
|
|
163
|
-
| `
|
|
164
|
-
| `grep` | `string` | — | Regex filter for lines. |
|
|
179
|
+
| Parameter | Type | Required | Description |
|
|
180
|
+
| -------------- | --------- | -------- | ------------------------------------------------------------------ |
|
|
181
|
+
| `path` | `string` | ✅ | Path to the log file. |
|
|
182
|
+
| `simThreshold` | `number` | ✅ | Similarity threshold (0-1). Lower values group more aggressively. |
|
|
183
|
+
| `tail` | `integer` | — | Last N lines. |
|
|
184
|
+
| `head` | `integer` | — | First N lines. |
|
|
185
|
+
| `grep` | `string` | — | Regex filter for lines before compression. |
|
|
186
|
+
| `templateId` | `string` | — | Drill into a specific template by its hash ID for sample captures. |
|
|
165
187
|
|
|
166
|
-
**Response:** Compressed log summary showing
|
|
188
|
+
**Response:** Compressed log summary showing template IDs, occurrence counts, and patterns with `<*>` wildcards.
|
|
167
189
|
|
|
168
190
|
**Examples:**
|
|
169
191
|
|
|
170
192
|
```
|
|
171
|
-
readLogFile path="/var/log/app.log"
|
|
172
|
-
readLogFile path="./logs/server.log"
|
|
173
|
-
readLogFile path="app.log"
|
|
193
|
+
readLogFile path="/var/log/app.log" simThreshold=0.4
|
|
194
|
+
readLogFile path="./logs/server.log" simThreshold=0.4 tail=1000
|
|
195
|
+
readLogFile path="app.log" simThreshold=0.3 grep="ERROR|WARN"
|
|
196
|
+
readLogFile path="app.log" simThreshold=0.4 templateId="aB3x_Yz7Q2Kf"
|
|
174
197
|
```
|
|
175
198
|
|
|
176
|
-
|
|
199
|
+
<details>
|
|
200
|
+
<summary><strong>Algorithm Notes</strong></summary>
|
|
177
201
|
|
|
178
|
-
|
|
202
|
+
This tool implements the [Drain algorithm](https://jiemingzhu.github.io/pub/pjhe_icws2017.pdf) (He et al., ICWS 2017) for online log parsing with content-hashed template IDs:
|
|
179
203
|
|
|
180
|
-
|
|
204
|
+
**Tree routing:** Lines are routed by token count → first N tokens (default N=2, configurable via `readLogFileRoutingDepth`). This deterministic routing ensures lines with different prefixes are never compared, preventing "cross-contamination" between unrelated patterns.
|
|
181
205
|
|
|
182
|
-
|
|
183
|
-
| --------- | -------- | -------- | ------------------------------ |
|
|
184
|
-
| `path` | `string` | ✅ | Path to the log file to flush. |
|
|
206
|
+
**Content-hashed template IDs:** Template IDs are 12-character base64URL hashes derived from the pattern itself. The same pattern always produces the same ID, enabling stateless drill-down across calls.
|
|
185
207
|
|
|
186
|
-
**
|
|
208
|
+
**Wildcard matching:** Variable tokens (timestamps, IDs, numbers) are replaced with `<*>` wildcards. Tokens starting with digits or matching hex patterns are automatically routed to wildcard buckets.
|
|
187
209
|
|
|
188
|
-
**
|
|
210
|
+
**Tuning:** If you see all-wildcard templates (e.g., `<*> <*> <*> <*>`), try increasing `readLogFileRoutingDepth` to 3 or 4. If you hit memory issues on very large logs, reduce it to 1.
|
|
189
211
|
|
|
190
|
-
|
|
191
|
-
flushLogFile path="/var/log/app.log"
|
|
192
|
-
```
|
|
212
|
+
</details>
|
|
193
213
|
|
|
194
214
|
---
|
|
195
215
|
|
|
@@ -234,17 +254,19 @@ wait durationSeconds=1 reason="animation to complete"
|
|
|
234
254
|
|
|
235
255
|
## Environment Variables
|
|
236
256
|
|
|
237
|
-
| Variable
|
|
238
|
-
|
|
|
239
|
-
| `waitMaxDurationSeconds`
|
|
240
|
-
| `readLogFileTimeoutMs`
|
|
241
|
-
| `
|
|
242
|
-
| `
|
|
243
|
-
| `
|
|
244
|
-
| `
|
|
245
|
-
| `
|
|
246
|
-
| `
|
|
247
|
-
| `
|
|
257
|
+
| Variable | Default | Description |
|
|
258
|
+
| ------------------------- | ------- | ------------------------------------------------------------------------------------------------------------------------------- |
|
|
259
|
+
| `waitMaxDurationSeconds` | `300` | Override the maximum allowed `durationSeconds`. Must be a positive number. Server refuses to start if invalid. |
|
|
260
|
+
| `readLogFileTimeoutMs` | `5000` | Override the timeout for `readLogFile` processing in milliseconds. Server refuses to start if invalid. |
|
|
261
|
+
| `readLogFileRoutingDepth` | `2` | Tree routing depth (1-5). Higher values isolate more but increase memory. Tune if you see all-wildcard templates or OOM errors. |
|
|
262
|
+
| `astGrepSearch` | _(on)_ | Set to `"false"` to disable the `astGrepSearch` tool at startup. |
|
|
263
|
+
| `checkFileOrDir` | _(on)_ | Set to `"false"` to disable the `checkFileOrDir` tool at startup. |
|
|
264
|
+
| `cloneFileOrDir` | _(on)_ | Set to `"false"` to disable the `cloneFileOrDir` tool at startup. |
|
|
265
|
+
| `deleteFileOrDir` | _(on)_ | Set to `"false"` to disable the `deleteFileOrDir` tool at startup. |
|
|
266
|
+
| `moveFileOrDir` | _(on)_ | Set to `"false"` to disable the `moveFileOrDir` tool at startup. |
|
|
267
|
+
| `readLogFile` | _(on)_ | Set to `"false"` to disable the `readLogFile` tool at startup. |
|
|
268
|
+
| `renameFileOrDir` | _(on)_ | Set to `"false"` to disable the `renameFileOrDir` tool at startup. |
|
|
269
|
+
| `wait` | _(on)_ | Set to `"false"` to disable the `wait` tool at startup. |
|
|
248
270
|
|
|
249
271
|
### Disabling Individual Tools
|
|
250
272
|
|
package/dist/index.js
CHANGED
|
@@ -4,6 +4,7 @@ import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js";
|
|
|
4
4
|
import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js";
|
|
5
5
|
import { register as registerAstGrepSearch } from "./tools/astGrepSearch.js";
|
|
6
6
|
import { register as registerCheckFileOrDir } from "./tools/checkFileOrDir.js";
|
|
7
|
+
import { register as registerCloneFileOrDir } from "./tools/cloneFileOrDir.js";
|
|
7
8
|
import { register as registerDeleteFileOrDir } from "./tools/deleteFileOrDir.js";
|
|
8
9
|
import { register as registerMoveFileOrDir } from "./tools/moveFileOrDir.js";
|
|
9
10
|
import { register as registerReadLogFile } from "./tools/readLogFile.js";
|
|
@@ -17,6 +18,8 @@ if (isEnabled("astGrepSearch"))
|
|
|
17
18
|
registerAstGrepSearch(server);
|
|
18
19
|
if (isEnabled("checkFileOrDir"))
|
|
19
20
|
registerCheckFileOrDir(server);
|
|
21
|
+
if (isEnabled("cloneFileOrDir"))
|
|
22
|
+
registerCloneFileOrDir(server);
|
|
20
23
|
if (isEnabled("deleteFileOrDir"))
|
|
21
24
|
registerDeleteFileOrDir(server);
|
|
22
25
|
if (isEnabled("moveFileOrDir"))
|
|
@@ -0,0 +1,55 @@
|
|
|
1
|
+
import { access, cp } from "node:fs/promises";
|
|
2
|
+
import { basename, join, resolve } from "node:path";
|
|
3
|
+
import { z } from "zod";
|
|
4
|
+
// Input schema for the cloneFileOrDir tool (validated by the MCP SDK via zod).
const schema = z.object({
    // Accepts either a single path or a list of paths.
    from: z
        .union([z.string(), z.array(z.string())])
        .describe("Source path(s) to clone."),
    to: z.string().describe("Destination directory."),
    // Required (not optional): callers must explicitly opt in to clobbering.
    overwrite: z.boolean().describe("If true, overwrite existing files."),
});
|
|
11
|
+
/** True when `path` is accessible on disk, false otherwise — never throws. */
async function exists(path) {
    return access(path).then(
        () => true,
        () => false,
    );
}
|
|
20
|
+
/**
 * Register the cloneFileOrDir tool on the given MCP server.
 *
 * Copies each source (file or directory, recursively) into the destination
 * directory under its own basename. Fails per-source when the destination
 * already exists and overwrite=false. Errors are returned as isError tool
 * responses rather than thrown; sources copied before a failure remain copied.
 */
export function register(server) {
    server.registerTool("cloneFileOrDir", {
        description: "Copy one or more files or directories to a destination directory.",
        inputSchema: schema,
        annotations: {
            destructiveHint: true,
            openWorldHint: false,
        },
    }, async (input) => {
        try {
            const sources = Array.isArray(input.from) ? input.from : [input.from];
            // Resolve the destination directory once; each source keeps its basename.
            const destDir = resolve(process.cwd(), input.to);
            const results = [];
            for (const src of sources) {
                const srcPath = resolve(process.cwd(), src);
                const destPath = join(destDir, basename(src));
                if (!input.overwrite && (await exists(destPath))) {
                    throw new Error(`Destination exists: ${destPath}. Set overwrite=true to replace.`);
                }
                await cp(srcPath, destPath, {
                    recursive: true,
                    force: input.overwrite,
                    // Without errorOnExist, force=false makes cp() silently
                    // skip files created between the exists() check and the
                    // copy (TOCTOU), reporting success for a copy that never
                    // happened. This makes the race fail loudly instead.
                    errorOnExist: !input.overwrite,
                });
                results.push({ source: srcPath, destination: destPath });
            }
            return {
                content: [{ type: "text", text: JSON.stringify(results, null, 2) }],
            };
        }
        catch (err) {
            return {
                isError: true,
                content: [{ type: "text", text: String(err) }],
            };
        }
    });
}
|
|
@@ -1,7 +1,7 @@
|
|
|
1
|
+
import { createHash } from "node:crypto";
|
|
1
2
|
import { readFile } from "node:fs/promises";
|
|
2
|
-
import { resolve
|
|
3
|
+
import { resolve } from "node:path";
|
|
3
4
|
import { z } from "zod";
|
|
4
|
-
import { createDrain } from "logpare";
|
|
5
5
|
const timeoutMs = (() => {
|
|
6
6
|
const env = process.env.readLogFileTimeoutMs;
|
|
7
7
|
if (!env)
|
|
@@ -13,33 +13,181 @@ const timeoutMs = (() => {
|
|
|
13
13
|
}
|
|
14
14
|
return n;
|
|
15
15
|
})();
|
|
16
|
-
const
|
|
17
|
-
|
|
16
|
+
/**
 * Tree routing depth for the Drain parser (1-5), read once at module load
 * from the readLogFileRoutingDepth env var. Defaults to 2. If the variable
 * is set but invalid, the server refuses to start (writes to stderr, exit 1).
 */
const routingDepth = (() => {
    const raw = process.env.readLogFileRoutingDepth;
    if (!raw)
        return 2;
    const parsed = Number(raw);
    const valid = Number.isInteger(parsed) && parsed >= 1 && parsed <= 5;
    if (!valid) {
        process.stderr.write(`Invalid readLogFileRoutingDepth: "${raw}". Must be 1-5.\n`);
        process.exit(1);
    }
    return parsed;
})();
|
|
27
|
+
// Placeholder token standing in for the variable parts of a log pattern.
const WILDCARD = "<*>";
// Routing-tree key used when a routed position holds a variable-looking token.
const WILDCARD_KEY = "<WILDCARD>";
// Maximum number of variable-capture samples retained per template.
const MAX_SAMPLES = 3;
/** Split a log line into tokens, keeping punctuation as separate tokens. */
function tokenize(line) {
    const pieces = line.split(/(\s+|[{}()\[\],:;="'`<>])/g);
    return pieces.filter((piece) => piece.trim());
}
|
|
33
|
+
/**
 * Heuristic: does this token look like a variable rather than a literal?
 * True for the wildcard itself, tokens starting with a digit, and long
 * pure-hex runs (ids, hashes).
 */
function looksLikeVariable(token) {
    if (token === WILDCARD)
        return true;
    const leading = token.charAt(0);
    if (leading >= "0" && leading <= "9")
        return true;
    return /^[0-9a-fA-F]+$/.test(token) && token.length > 8;
}
|
|
43
|
+
/**
 * Fraction of positions where `tokens` matches `template`; a wildcard slot
 * matches anything. Returns 0 when lengths differ, so only same-length
 * lines are ever compared.
 */
function similarity(tokens, template) {
    if (tokens.length !== template.length)
        return 0;
    let hits = 0;
    tokens.forEach((tok, i) => {
        if (template[i] === WILDCARD || tok === template[i])
            hits++;
    });
    return hits / tokens.length;
}
|
|
53
|
+
/** Merge a token list into a template: any position that differs becomes a wildcard. */
function mergeTokens(tokens, template) {
    const merged = [];
    for (let i = 0; i < template.length; i++) {
        // Keep the template token only when it is concrete AND agrees with the line.
        const keep = template[i] !== WILDCARD && template[i] === tokens[i];
        merged.push(keep ? template[i] : WILDCARD);
    }
    return merged;
}
|
|
56
|
+
/** Collect the concrete tokens that fill a template's wildcard slots. */
function extractVariables(tokens, template) {
    const captured = [];
    template.forEach((slot, i) => {
        if (slot === WILDCARD && tokens[i])
            captured.push(tokens[i]);
    });
    return captured;
}
|
|
64
|
+
/** Render a token list as a single space-joined pattern string. */
function tokensToPattern(tokens) {
    const SEPARATOR = " ";
    return tokens.join(SEPARATOR);
}
|
|
67
|
+
/**
 * Routing key for one token position: missing or variable-looking tokens all
 * collapse into the shared wildcard bucket; literals route by their own text.
 */
function getRouteKey(token) {
    if (!token)
        return WILDCARD_KEY;
    if (looksLikeVariable(token))
        return WILDCARD_KEY;
    return token;
}
|
|
72
|
+
/** Routing keys for the first `depth` token positions of a tokenized line. */
function getRouteKeys(tokens, depth) {
    return Array.from({ length: depth }, (_, i) => getRouteKey(tokens[i]));
}
|
|
79
|
+
/** Fresh routing-tree node: child buckets plus the templates stored at this level. */
function createNode() {
    const node = { children: new Map(), templates: [] };
    return node;
}
|
|
82
|
+
/**
 * Tree-based Drain algorithm with configurable routing depth.
 * Structure: root → length → token[0] → token[1] → ... → token[depth-1] → templates[]
 * Deeper routing prevents cross-contamination but increases memory usage.
 */
class DrainTree {
    // Minimum similarity (0-1) a line must reach to join an existing template.
    simThreshold;
    // Number of leading token positions used for tree routing.
    depth;
    // Top level of the tree, keyed by token count (lines of different
    // lengths are never compared — similarity() returns 0 for them anyway).
    root = new Map();
    constructor(simThreshold, depth) {
        this.simThreshold = simThreshold;
        this.depth = depth;
    }
    // Walk (and, when `create` is true, build) the path root → length →
    // keys[0] → ... → keys[depth-1]. Returns the leaf node, or undefined
    // when some step is absent and create is false.
    navigate(length, keys, create) {
        let lengthNode = this.root.get(length);
        if (!lengthNode) {
            if (!create)
                return undefined;
            lengthNode = createNode();
            this.root.set(length, lengthNode);
        }
        let current = lengthNode;
        for (const key of keys) {
            let child = current.children.get(key);
            if (!child) {
                if (!create)
                    return undefined;
                child = createNode();
                current.children.set(key, child);
            }
            current = child;
        }
        return current;
    }
    // Route one raw log line into the tree: either merge it into the most
    // similar existing template at its leaf, or start a new template there.
    addLine(line) {
        const tokens = tokenize(line);
        if (!tokens.length)
            return;
        const length = tokens.length;
        const keys = getRouteKeys(tokens, this.depth);
        // Read-only lookup first; the leaf may not exist yet.
        const node = this.navigate(length, keys, false);
        let bestMatch = null;
        let bestSim = 0;
        if (node) {
            // Pick the single best template at or above the threshold.
            for (const template of node.templates) {
                const sim = similarity(tokens, template.tokens);
                if (sim >= this.simThreshold && sim > bestSim) {
                    bestSim = sim;
                    bestMatch = template;
                }
            }
        }
        if (bestMatch) {
            // Positions that disagree become wildcards; pattern text and
            // count are kept in sync with the merged token list.
            bestMatch.tokens = mergeTokens(tokens, bestMatch.tokens);
            bestMatch.pattern = tokensToPattern(bestMatch.tokens);
            bestMatch.count++;
            if (bestMatch.samples.length < MAX_SAMPLES) {
                // Capture this line's values for the (post-merge) wildcard slots.
                bestMatch.samples.push(extractVariables(tokens, bestMatch.tokens));
            }
        }
        else {
            // No match: create the leaf if needed and seed a new template.
            // The first line has no wildcard slots yet, so samples start empty.
            const targetNode = this.navigate(length, keys, true);
            targetNode.templates.push({
                tokens,
                pattern: tokensToPattern(tokens),
                count: 1,
                samples: [],
            });
        }
    }
    // Depth-first collection of every template in the tree, in tree order.
    getTemplates() {
        const result = [];
        const collectFromNode = (node) => {
            result.push(...node.templates);
            for (const child of node.children.values()) {
                collectFromNode(child);
            }
        };
        for (const lengthNode of this.root.values()) {
            collectFromNode(lengthNode);
        }
        return result;
    }
}
|
|
166
|
+
/**
 * Run every line through a fresh DrainTree (sized by the module-level
 * routingDepth) and return the resulting templates. Stateless per call.
 */
function compress(lines, simThreshold) {
    const tree = new DrainTree(simThreshold, routingDepth);
    lines.forEach((line) => tree.addLine(line));
    return tree.getTemplates();
}
|
|
173
|
+
// --- MCP Tool ---
|
|
174
|
+
/**
 * Stable 12-character base64url id derived from the pattern text via
 * SHA-256, so the same pattern always yields the same id across calls.
 */
function hashTemplateId(pattern) {
    const digest = createHash("sha256").update(pattern).digest("base64url");
    return digest.slice(0, 12);
}
|
|
18
177
|
// Input schema for the readLogFile tool. head and tail are mutually
// exclusive at runtime (enforced in processLog, not here).
const schema = z.object({
    path: z.string().min(1).describe("Path to the log file."),
    // Required: there is no default threshold — callers must pick one.
    simThreshold: z
        .number()
        .min(0)
        .max(1)
        .describe("Similarity threshold (0-1). Lower = more aggressive grouping."),
    tail: z.number().int().min(1).optional().describe("Last N lines."),
    head: z.number().int().min(1).optional().describe("First N lines."),
    grep: z.string().optional().describe("Regex filter for lines."),
    // Content-hashed id produced by hashTemplateId; selects drill-down mode.
    templateId: z
        .string()
        .optional()
        .describe("Drill into a specific template by its hash ID."),
});
|
|
44
192
|
const ok = (text) => ({ content: [{ type: "text", text }] });
|
|
45
193
|
const err = (text) => ({
|
|
@@ -48,103 +196,78 @@ const err = (text) => ({
|
|
|
48
196
|
});
|
|
49
197
|
/**
 * Register the readLogFile tool on the given MCP server.
 * The handler races processLog() against the module-level timeout and
 * converts any rejection into an isError tool response via err().
 */
export function register(server) {
    server.registerTool("readLogFile", {
        description: "Compress a log file using the Drain algorithm for semantic pattern extraction. Groups similar lines into templates. Stateless — each call processes the file fresh. Template IDs are content-hashed so the same pattern always has the same ID.",
        inputSchema: schema,
        annotations: { readOnlyHint: true, openWorldHint: false },
    }, async (input) => {
        try {
            // Whichever settles first wins: the result string or a timeout rejection.
            return ok(await Promise.race([processLog(input), timeout()]));
        }
        catch (e) {
            return err(String(e));
        }
    });
}
|
|
63
|
-
/**
 * Read a log file, optionally slice (head/tail) and filter (grep) it, then
 * either return a compressed overview of all templates or drill into one
 * template selected by its content-hashed id.
 *
 * Returns a plain string in every case: the summary, the drill-down view,
 * or a user-facing message for bad argument combinations / empty input.
 * Throws on I/O errors (missing file) and invalid grep patterns; the
 * registered handler converts those into isError responses.
 */
async function processLog(input) {
    const path = resolve(process.cwd(), input.path);
    if (input.head && input.tail) {
        return "Cannot use both head and tail.";
    }
    const content = await readFile(path, "utf-8");
    let lines = content.split(/\r?\n/).filter(Boolean);
    if (input.head)
        lines = lines.slice(0, input.head);
    else if (input.tail)
        lines = lines.slice(-input.tail);
    if (input.grep) {
        // Compile the user-supplied pattern once, not once per line.
        const grepRe = new RegExp(input.grep);
        lines = lines.filter((l) => grepRe.test(l));
    }
    if (!lines.length)
        return "No log lines to process.";
    const templates = compress(lines, input.simThreshold);
    const templateMap = buildTemplateMap(templates);
    if (input.templateId) {
        const template = templateMap.get(input.templateId);
        if (!template) {
            // Help the caller recover by listing known ids (capped at 10).
            const available = [...templateMap.keys()].slice(0, 10).join(", ");
            return `Template "${input.templateId}" not found. Available: ${available}${templateMap.size > 10 ? ` (+${templateMap.size - 10} more)` : ""}`;
        }
        return formatDrillDown(template);
    }
    return formatOverview(templateMap, lines.length);
}
|
|
238
|
+
/** Index the raw templates by their content-hashed id. */
function buildTemplateMap(templates) {
    const map = new Map();
    for (const { pattern, count, samples } of templates) {
        const id = hashTemplateId(pattern);
        map.set(id, { id, pattern, count, samples });
    }
    return map;
}
|
|
251
|
+
/**
 * Human-readable summary: line/template counts with a reduction percentage,
 * followed by the top-20 templates sorted by occurrence count.
 */
function formatOverview(templateMap, lineCount) {
    const byCount = [...templateMap.values()].sort((a, b) => b.count - a.count);
    const reduction = Math.round((1 - templateMap.size / lineCount) * 100);
    const header = `=== Log Compression ===\n${lineCount} lines → ${templateMap.size} templates (${reduction}% reduction)\n`;
    const topLines = byCount
        .slice(0, 20)
        .map((t) => `${t.id} [${t.count}x] ${t.pattern}`)
        .join("\n");
    const hidden = byCount.length - 20;
    const footer = hidden > 0 ? `\n... and ${hidden} more templates` : "";
    return `${header}\n${topLines}${footer}`;
}
|
|
263
|
+
/**
 * Detailed view of one template: id, pattern, match count, and (when any
 * were captured) the sampled variable values for its wildcard slots.
 */
function formatDrillDown(template) {
    const header = `Template: ${template.id}\nPattern: ${template.pattern}\nMatches: ${template.count}\n`;
    if (template.samples.length === 0)
        return header;
    const sampleLines = [];
    template.samples.forEach((vars, i) => {
        sampleLines.push(`  ${i + 1}. Variables: ${vars.join(", ")}`);
    });
    return `${header}\nSample variable captures:\n${sampleLines.join("\n")}`;
}
|
|
149
272
|
function timeout() {
|
|
150
273
|
return new Promise((_, rej) => setTimeout(() => rej(new Error(`Timeout: ${timeoutMs}ms`)), timeoutMs));
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "mcp-multitool",
|
|
3
|
-
"version": "0.1.
|
|
3
|
+
"version": "0.1.12",
|
|
4
4
|
"description": "MCP server with file operations (delete, move) and timing utilities.",
|
|
5
5
|
"license": "MIT",
|
|
6
6
|
"type": "module",
|
|
@@ -31,7 +31,6 @@
|
|
|
31
31
|
"dependencies": {
|
|
32
32
|
"@ast-grep/napi": "^0.42.1",
|
|
33
33
|
"@modelcontextprotocol/sdk": "1.29.0",
|
|
34
|
-
"logpare": "^0.1.0",
|
|
35
34
|
"zod": "^3.24.0"
|
|
36
35
|
},
|
|
37
36
|
"devDependencies": {
|