labgate 0.5.10 → 0.5.12
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +59 -4
- package/dist/cli.js +425 -27
- package/dist/cli.js.map +1 -1
- package/dist/lib/cluster-mcp.d.ts +33 -0
- package/dist/lib/cluster-mcp.js +313 -0
- package/dist/lib/cluster-mcp.js.map +1 -0
- package/dist/lib/config.d.ts +1 -0
- package/dist/lib/config.js +11 -4
- package/dist/lib/config.js.map +1 -1
- package/dist/lib/container.d.ts +10 -0
- package/dist/lib/container.js +195 -32
- package/dist/lib/container.js.map +1 -1
- package/dist/lib/dataset-mcp.d.ts +20 -0
- package/dist/lib/dataset-mcp.js +809 -0
- package/dist/lib/dataset-mcp.js.map +1 -0
- package/dist/lib/feedback.js +15 -3
- package/dist/lib/feedback.js.map +1 -1
- package/dist/lib/init.js +2 -2
- package/dist/lib/results-mcp.d.ts +9 -0
- package/dist/lib/results-mcp.js +205 -0
- package/dist/lib/results-mcp.js.map +1 -0
- package/dist/lib/results-store.d.ts +61 -0
- package/dist/lib/results-store.js +319 -0
- package/dist/lib/results-store.js.map +1 -0
- package/dist/lib/slurm-cli-passthrough.d.ts +25 -0
- package/dist/lib/slurm-cli-passthrough.js +330 -0
- package/dist/lib/slurm-cli-passthrough.js.map +1 -0
- package/dist/lib/slurm-mcp.js +1 -1
- package/dist/lib/slurm-mcp.js.map +1 -1
- package/dist/lib/test/integration-harness.d.ts +4 -0
- package/dist/lib/test/integration-harness.js +14 -2
- package/dist/lib/test/integration-harness.js.map +1 -1
- package/dist/lib/ui.html +2068 -351
- package/dist/lib/ui.js +701 -0
- package/dist/lib/ui.js.map +1 -1
- package/dist/mcp-bundles/cluster-mcp.bundle.mjs +30235 -0
- package/dist/mcp-bundles/dataset-mcp.bundle.mjs +30971 -0
- package/dist/mcp-bundles/results-mcp.bundle.mjs +30449 -0
- package/dist/mcp-bundles/slurm-mcp.bundle.mjs +30501 -0
- package/package.json +4 -2
|
@@ -0,0 +1,809 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
/**
|
|
3
|
+
* LabGate Dataset MCP Server
|
|
4
|
+
*
|
|
5
|
+
* A Model Context Protocol server that exposes dataset browsing and
|
|
6
|
+
* inspection tools to Claude Code. Runs as a standalone process using
|
|
7
|
+
* stdio transport.
|
|
8
|
+
*
|
|
9
|
+
* Usage: node dist/lib/dataset-mcp.js
|
|
10
|
+
*
|
|
11
|
+
* Reads dataset configuration from ~/.labgate/config.json.
|
|
12
|
+
*/
|
|
13
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
14
|
+
exports.resolvePathWithinRoot = resolvePathWithinRoot;
|
|
15
|
+
exports.main = main;
|
|
16
|
+
const mcp_js_1 = require("@modelcontextprotocol/sdk/server/mcp.js");
|
|
17
|
+
const stdio_js_1 = require("@modelcontextprotocol/sdk/server/stdio.js");
|
|
18
|
+
const config_js_1 = require("./config.js");
|
|
19
|
+
const fs_1 = require("fs");
|
|
20
|
+
const path_1 = require("path");
|
|
21
|
+
const os_1 = require("os");
|
|
22
|
+
const zod_1 = require("zod");
|
|
23
|
+
// ── Helpers ──────────────────────────────────────────────
|
|
24
|
+
/**
 * Map a configured dataset to the filesystem path it should be read from.
 *
 * Inside the sandbox container (LABGATE_CONTAINER_MODE=1) datasets are
 * mounted at /datasets/{name}; on the host the configured path is used,
 * with a leading "~" expanded to the user's home directory.
 */
function resolveDatasetPath(ds) {
    const inContainer = process.env.LABGATE_CONTAINER_MODE === '1';
    if (inContainer) {
        return `/datasets/${ds.name}`;
    }
    return ds.path.replace(/^~/, os_1.homedir());
}
|
|
30
|
+
/**
 * Format a byte count as a human-readable string, e.g. "1.5 MB".
 *
 * Fix: the unit index was previously unclamped, so sizes >= 1024 TB produced
 * "… undefined" and negative or non-finite inputs produced "NaN undefined".
 * The index is now clamped into the units table and bad input yields "0 B".
 */
function formatBytes(bytes) {
    if (!Number.isFinite(bytes) || bytes <= 0)
        return '0 B';
    const units = ['B', 'KB', 'MB', 'GB', 'TB'];
    // Clamp into [0, units.length - 1] so huge sizes stay in TB.
    const i = Math.min(units.length - 1, Math.max(0, Math.floor(Math.log(bytes) / Math.log(1024))));
    const val = bytes / Math.pow(1024, i);
    // Whole bytes need no decimal; larger units get one decimal place.
    return `${val.toFixed(i === 0 ? 0 : 1)} ${units[i]}`;
}
|
|
38
|
+
/**
 * Coerce a value to an integer within [min, max], returning `fallback`
 * when the input is not a finite number.
 */
function clampInt(value, fallback, min, max) {
    if (!Number.isFinite(value))
        return fallback;
    const floored = Math.floor(value);
    if (floored < min)
        return min;
    if (floored > max)
        return max;
    return floored;
}
|
|
43
|
+
/** True when targetPath is rootPath itself or lies underneath it. */
function isPathInside(rootPath, targetPath) {
    const rel = path_1.relative(rootPath, targetPath);
    if (rel === '')
        return true; // same directory
    return !rel.startsWith('..') && !path_1.isAbsolute(rel);
}
|
|
47
|
+
/**
 * Resolve a user-provided path under a root directory and reject traversal.
 * Returns null when the resolved path escapes the root.
 *
 * If the target exists, symlinks are resolved and containment is re-checked
 * to prevent escaping via symlink hops.
 */
function resolvePathWithinRoot(root, userPath) {
    // Containment test: candidate equals the base or sits beneath it.
    const contained = (base, candidate) => {
        const rel = path_1.relative(base, candidate);
        return rel === '' || (!rel.startsWith('..') && !path_1.isAbsolute(rel));
    };
    const resolvedRoot = path_1.resolve(root);
    const resolvedTarget = path_1.resolve(resolvedRoot, userPath || '.');
    if (!contained(resolvedRoot, resolvedTarget))
        return null;
    if (fs_1.existsSync(resolvedTarget)) {
        // Re-check against canonical (symlink-free) paths.
        try {
            const canonicalRoot = fs_1.existsSync(resolvedRoot) ? fs_1.realpathSync(resolvedRoot) : resolvedRoot;
            const canonicalTarget = fs_1.realpathSync(resolvedTarget);
            if (!contained(canonicalRoot, canonicalTarget))
                return null;
        }
        catch {
            return null;
        }
    }
    return resolvedTarget;
}
|
|
72
|
+
/** Non-throwing wrapper around fs.accessSync: true when access is granted. */
function canAccess(path, mode) {
    let granted = true;
    try {
        fs_1.accessSync(path, mode);
    }
    catch {
        granted = false;
    }
    return granted;
}
|
|
81
|
+
/**
 * Return up to the last `maxLines` lines of a text file.
 *
 * Reads at most the final 1 MB of the file; when the file is larger than
 * that, the first (likely partial) line of the window is discarded so the
 * output starts on a line boundary. Returns '' for missing or empty files.
 */
function tailReadFile(filePath, maxLines) {
    if (!fs_1.existsSync(filePath))
        return '';
    const MAX_WINDOW_BYTES = 1_048_576; // 1 MB
    const fileSize = fs_1.statSync(filePath).size;
    const windowSize = Math.min(fileSize, MAX_WINDOW_BYTES);
    if (windowSize === 0)
        return '';
    const window = Buffer.alloc(windowSize);
    const fd = fs_1.openSync(filePath, 'r');
    try {
        // Read the trailing windowSize bytes of the file.
        fs_1.readSync(fd, window, 0, windowSize, Math.max(0, fileSize - windowSize));
    }
    finally {
        try {
            fs_1.closeSync(fd);
        }
        catch { /* best effort */ }
    }
    let text = window.toString('utf-8');
    if (fileSize > windowSize) {
        // Window starts mid-line; drop the partial first line.
        const firstBreak = text.indexOf('\n');
        if (firstBreak >= 0)
            text = text.slice(firstBreak + 1);
    }
    return text.split('\n').slice(-maxLines).join('\n');
}
|
|
112
|
+
/** Recursively walk a directory up to a depth limit, collecting file info. */
function walkDir(dir, maxDepth, currentDepth = 0) {
    if (currentDepth > maxDepth)
        return [];
    let names;
    try {
        names = fs_1.readdirSync(dir);
    }
    catch {
        return []; // unreadable directory: report nothing
    }
    const collected = [];
    for (const name of names) {
        // Hidden entries are skipped at the top level only, to cut noise.
        if (currentDepth === 0 && name.startsWith('.'))
            continue;
        const entryPath = path_1.join(dir, name);
        let st;
        try {
            st = fs_1.statSync(entryPath);
        }
        catch {
            continue; // unstattable entry (permissions, dangling link, etc.)
        }
        const isDir = st.isDirectory();
        collected.push({ path: entryPath, size: st.size, isDir });
        if (isDir && currentDepth < maxDepth) {
            collected.push(...walkDir(entryPath, maxDepth, currentDepth + 1));
        }
    }
    return collected;
}
|
|
142
|
+
/** Glob-style matching (simple: supports * and ?); case-insensitive. */
function simpleMatch(pattern, text) {
    // Escape regex metacharacters, then translate the glob wildcards.
    const escaped = pattern.replace(/[.+^${}()|[\]\\]/g, '\\$&');
    const body = escaped.replace(/\*/g, '.*').replace(/\?/g, '.');
    return new RegExp(`^${body}$`, 'i').test(text);
}
|
|
152
|
+
/** Derive a safe dataset name from the last path segment. */
function datasetNameFromPath(path) {
    const base = path_1.basename(path);
    // Replace anything outside [a-zA-Z0-9._-] with '-'.
    return base.replace(/[^a-zA-Z0-9._-]/g, '-');
}
|
|
155
|
+
/** A valid name starts alphanumeric and uses only [a-zA-Z0-9._-]. */
function validDatasetName(name) {
    const NAME_RE = /^[a-zA-Z0-9][a-zA-Z0-9._-]*$/;
    return NAME_RE.test(name);
}
|
|
158
|
+
/**
 * Read and parse the LabGate config file, tolerating whole-line // comments.
 *
 * Lines whose first non-whitespace characters are "//" are stripped before
 * JSON parsing. On success, guarantees `rawConfig.datasets` is an array.
 *
 * Fix: the error message previously said "Could not read config file." even
 * when the file was readable but its JSON was malformed; the underlying
 * error message is now included so the user can tell which failure it was.
 *
 * @returns {{configPath: string, rawConfig: object} | {error: string}}
 */
function readRawConfig() {
    const configPath = config_js_1.getConfigPath();
    try {
        const rawText = fs_1.readFileSync(configPath, 'utf-8');
        // Drop whole-line // comments; JSON.parse would reject them.
        const stripped = rawText
            .split('\n')
            .filter((line) => !line.trimStart().startsWith('//'))
            .join('\n');
        const rawConfig = JSON.parse(stripped);
        if (!Array.isArray(rawConfig.datasets))
            rawConfig.datasets = [];
        return { configPath, rawConfig };
    }
    catch (err) {
        return { error: `Could not read config file: ${err.message}` };
    }
}
|
|
175
|
+
/**
 * Persist the raw config as pretty-printed JSON with private permissions
 * on both the containing directory and the file itself.
 * Returns { ok: true } on success or { error } with a message on failure.
 */
function writeRawConfig(configPath, rawConfig) {
    try {
        config_js_1.ensurePrivateDir(path_1.dirname(configPath));
        const serialized = JSON.stringify(rawConfig, null, 2);
        fs_1.writeFileSync(configPath, serialized, 'utf-8');
        config_js_1.ensurePrivateFile(configPath);
        return { ok: true };
    }
    catch (err) {
        return { error: `Failed to write config: ${err.message}` };
    }
}
|
|
186
|
+
/** Index of the dataset whose name matches case-insensitively, or -1. */
function findDatasetIndex(rawConfig, name) {
    const datasets = rawConfig.datasets;
    if (!Array.isArray(datasets))
        return -1;
    const wanted = name.toLowerCase();
    return datasets.findIndex((d) => d?.name && String(d.name).toLowerCase() === wanted);
}
|
|
191
|
+
/**
 * Dataset (other than the one at excludeIndex) whose name matches
 * case-insensitively, or null when there is no conflict.
 */
function findDatasetNameConflict(rawConfig, name, excludeIndex = -1) {
    const datasets = rawConfig.datasets;
    if (!Array.isArray(datasets))
        return null;
    const wanted = name.toLowerCase();
    const hit = datasets.find((d, i) => i !== excludeIndex && d?.name && String(d.name).toLowerCase() === wanted);
    return hit || null;
}
|
|
196
|
+
/**
 * Dataset (other than the one at excludeIndex) registered at the same
 * resolved path, or null. A leading "~" is expanded before comparison.
 */
function findDatasetPathConflict(rawConfig, path, excludeIndex = -1) {
    const datasets = rawConfig.datasets;
    if (!Array.isArray(datasets))
        return null;
    const expand = (p) => path_1.resolve(String(p).replace(/^~/, os_1.homedir()));
    const normalized = expand(path);
    const hit = datasets.find((d, i) => i !== excludeIndex && d?.path && expand(d.path) === normalized);
    return hit || null;
}
|
|
202
|
+
/** One-line human-readable summary of a registered dataset. */
function registeredSummary(ds) {
    const { name, mode, path } = ds;
    return `${name} (${mode}) at ${path}`;
}
|
|
205
|
+
/**
 * Validate that a user-supplied path is absolute.
 *
 * Fix: the previous implementation expanded a leading "~" after requiring
 * the path to start with "/" — dead code, since a path passing the
 * startsWith('/') check can never begin with "~". The input is returned
 * unchanged; behavior is identical.
 *
 * @returns {{resolved: string} | {error: string}}
 */
function parseAbsolutePath(inputPath) {
    if (!inputPath || !inputPath.startsWith('/')) {
        return { error: 'Path must be an absolute path starting with /.' };
    }
    return { resolved: inputPath };
}
|
|
211
|
+
/** Check that a path exists and is a directory; returns {ok:true} or {error}. */
function ensureDirectory(path) {
    if (!fs_1.existsSync(path)) {
        return { error: `Path does not exist: ${path}` };
    }
    if (!fs_1.statSync(path).isDirectory()) {
        return { error: `Path is not a directory: ${path}` };
    }
    return { ok: true };
}
|
|
219
|
+
/** Wrap a payload as a pretty-printed JSON MCP text result. */
function asJson(payload) {
    const text = JSON.stringify(payload, null, 2);
    return { content: [{ type: 'text', text }] };
}
|
|
224
|
+
/** Wrap plain text as an MCP text result. */
function asText(text) {
    return { content: [{ type: 'text', text }] };
}
|
|
229
|
+
/** Wrap text as an MCP error result (isError flag set). */
function asError(text) {
    return { content: [{ type: 'text', text }], isError: true };
}
|
|
235
|
+
// ── MCP Server ───────────────────────────────────────────
|
|
236
|
+
async function main() {
|
|
237
|
+
const server = new mcp_js_1.McpServer({
|
|
238
|
+
name: 'labgate-datasets',
|
|
239
|
+
version: '1.0.0',
|
|
240
|
+
}, {
|
|
241
|
+
capabilities: {
|
|
242
|
+
tools: {},
|
|
243
|
+
},
|
|
244
|
+
instructions: 'LabGate Dataset tools. Use these tools to discover, browse, and inspect ' +
|
|
245
|
+
'datasets mounted in the LabGate sandbox. Datasets are mounted at /datasets/{name} ' +
|
|
246
|
+
'inside the container and are accessible on the host via their configured paths.',
|
|
247
|
+
});
|
|
248
|
+
// Helper to load fresh datasets from config each time
|
|
249
|
+
function getDatasets() {
|
|
250
|
+
try {
|
|
251
|
+
const config = (0, config_js_1.loadConfig)();
|
|
252
|
+
return config.datasets || [];
|
|
253
|
+
}
|
|
254
|
+
catch {
|
|
255
|
+
return [];
|
|
256
|
+
}
|
|
257
|
+
}
|
|
258
|
+
// ── Tool: list_datasets ──────────────────────────────────
|
|
259
|
+
server.registerTool('list_datasets', {
|
|
260
|
+
title: 'List Datasets',
|
|
261
|
+
description: 'List all configured datasets with their names, paths, access modes, ' +
|
|
262
|
+
'descriptions, and basic stats (total size, file count).',
|
|
263
|
+
inputSchema: {},
|
|
264
|
+
}, async () => {
|
|
265
|
+
const datasets = getDatasets();
|
|
266
|
+
if (datasets.length === 0) {
|
|
267
|
+
return {
|
|
268
|
+
content: [{ type: 'text', text: 'No datasets configured.' }],
|
|
269
|
+
};
|
|
270
|
+
}
|
|
271
|
+
const summaries = datasets.map((ds) => {
|
|
272
|
+
const hostPath = resolveDatasetPath(ds);
|
|
273
|
+
const exists = (0, fs_1.existsSync)(hostPath);
|
|
274
|
+
let fileCount = 0;
|
|
275
|
+
let totalSize = 0;
|
|
276
|
+
if (exists) {
|
|
277
|
+
try {
|
|
278
|
+
const entries = walkDir(hostPath, 1);
|
|
279
|
+
const files = entries.filter((e) => !e.isDir);
|
|
280
|
+
fileCount = files.length;
|
|
281
|
+
totalSize = files.reduce((sum, f) => sum + f.size, 0);
|
|
282
|
+
}
|
|
283
|
+
catch { /* best effort */ }
|
|
284
|
+
}
|
|
285
|
+
return {
|
|
286
|
+
name: ds.name,
|
|
287
|
+
container_path: `/datasets/${ds.name}`,
|
|
288
|
+
host_path: hostPath,
|
|
289
|
+
mode: ds.mode,
|
|
290
|
+
description: ds.description || null,
|
|
291
|
+
exists,
|
|
292
|
+
file_count: fileCount,
|
|
293
|
+
total_size: formatBytes(totalSize),
|
|
294
|
+
total_size_bytes: totalSize,
|
|
295
|
+
};
|
|
296
|
+
});
|
|
297
|
+
return asJson(summaries);
|
|
298
|
+
});
|
|
299
|
+
// ── Tool: inspect_dataset ────────────────────────────────
|
|
300
|
+
server.registerTool('inspect_dataset', {
|
|
301
|
+
title: 'Inspect Dataset',
|
|
302
|
+
description: 'Browse the contents of a dataset directory. Returns a listing of files and ' +
|
|
303
|
+
'subdirectories with sizes. Use the path parameter to explore subdirectories.',
|
|
304
|
+
inputSchema: {
|
|
305
|
+
name: zod_1.z.string().describe('Dataset name (as configured in LabGate)'),
|
|
306
|
+
path: zod_1.z.string().optional().describe('Relative path within the dataset to browse (default: root)'),
|
|
307
|
+
depth: zod_1.z.number().int().min(0).max(5).optional().default(1).describe('How many levels deep to list (0 = current dir only, max 5)'),
|
|
308
|
+
},
|
|
309
|
+
}, async ({ name, path: relPath, depth }) => {
|
|
310
|
+
const datasets = getDatasets();
|
|
311
|
+
const ds = datasets.find((d) => d.name === name);
|
|
312
|
+
if (!ds) {
|
|
313
|
+
return asError(`Dataset "${name}" not found. Use list_datasets to see available datasets.`);
|
|
314
|
+
}
|
|
315
|
+
const hostRoot = resolveDatasetPath(ds);
|
|
316
|
+
const targetDir = resolvePathWithinRoot(hostRoot, relPath);
|
|
317
|
+
if (!targetDir) {
|
|
318
|
+
return asError('Path traversal is not allowed.');
|
|
319
|
+
}
|
|
320
|
+
if (!(0, fs_1.existsSync)(targetDir)) {
|
|
321
|
+
return asError(`Path does not exist: ${relPath || '/'}`);
|
|
322
|
+
}
|
|
323
|
+
const safeDepth = clampInt(depth, 1, 0, 5);
|
|
324
|
+
const st = (0, fs_1.statSync)(targetDir);
|
|
325
|
+
if (!st.isDirectory()) {
|
|
326
|
+
// It's a file — return file info
|
|
327
|
+
return asJson({
|
|
328
|
+
type: 'file',
|
|
329
|
+
name: (0, path_1.basename)(targetDir),
|
|
330
|
+
size: formatBytes(st.size),
|
|
331
|
+
size_bytes: st.size,
|
|
332
|
+
modified: st.mtime.toISOString(),
|
|
333
|
+
extension: (0, path_1.extname)(targetDir),
|
|
334
|
+
});
|
|
335
|
+
}
|
|
336
|
+
let entries;
|
|
337
|
+
try {
|
|
338
|
+
entries = (0, fs_1.readdirSync)(targetDir);
|
|
339
|
+
}
|
|
340
|
+
catch (err) {
|
|
341
|
+
return asError(`Cannot read directory: ${err.message}`);
|
|
342
|
+
}
|
|
343
|
+
const items = entries
|
|
344
|
+
.sort()
|
|
345
|
+
.map((entry) => {
|
|
346
|
+
const fullPath = (0, path_1.join)(targetDir, entry);
|
|
347
|
+
try {
|
|
348
|
+
const est = (0, fs_1.statSync)(fullPath);
|
|
349
|
+
const item = {
|
|
350
|
+
name: entry,
|
|
351
|
+
type: est.isDirectory() ? 'directory' : 'file',
|
|
352
|
+
size: formatBytes(est.size),
|
|
353
|
+
size_bytes: est.size,
|
|
354
|
+
modified: est.mtime.toISOString(),
|
|
355
|
+
};
|
|
356
|
+
if (!est.isDirectory()) {
|
|
357
|
+
item.extension = (0, path_1.extname)(entry);
|
|
358
|
+
}
|
|
359
|
+
if (est.isDirectory() && safeDepth > 0) {
|
|
360
|
+
try {
|
|
361
|
+
const subEntries = (0, fs_1.readdirSync)(fullPath);
|
|
362
|
+
item.child_count = subEntries.length;
|
|
363
|
+
}
|
|
364
|
+
catch { /* permission error */ }
|
|
365
|
+
}
|
|
366
|
+
return item;
|
|
367
|
+
}
|
|
368
|
+
catch {
|
|
369
|
+
return { name: entry, type: 'unknown', error: 'cannot stat' };
|
|
370
|
+
}
|
|
371
|
+
})
|
|
372
|
+
.slice(0, 200); // Cap at 200 entries
|
|
373
|
+
const browsePath = relPath || '/';
|
|
374
|
+
return asJson({
|
|
375
|
+
dataset: name,
|
|
376
|
+
container_path: `/datasets/${name}${relPath ? '/' + relPath : ''}`,
|
|
377
|
+
browsing: browsePath,
|
|
378
|
+
total_entries: entries.length,
|
|
379
|
+
shown: items.length,
|
|
380
|
+
items,
|
|
381
|
+
});
|
|
382
|
+
});
|
|
383
|
+
// ── Tool: search_dataset ─────────────────────────────────
|
|
384
|
+
server.registerTool('search_dataset', {
|
|
385
|
+
title: 'Search Dataset Files',
|
|
386
|
+
description: 'Search for files within a dataset by name pattern (glob-style: * and ? wildcards). ' +
|
|
387
|
+
'Returns matching files with paths and sizes.',
|
|
388
|
+
inputSchema: {
|
|
389
|
+
name: zod_1.z.string().describe('Dataset name'),
|
|
390
|
+
pattern: zod_1.z.string().describe('Filename pattern to match (e.g., "*.csv", "sample_?.fastq.gz")'),
|
|
391
|
+
max_results: zod_1.z.number().int().min(1).max(500).optional().default(50).describe('Maximum results to return'),
|
|
392
|
+
},
|
|
393
|
+
}, async ({ name, pattern, max_results }) => {
|
|
394
|
+
const datasets = getDatasets();
|
|
395
|
+
const ds = datasets.find((d) => d.name === name);
|
|
396
|
+
if (!ds) {
|
|
397
|
+
return asError(`Dataset "${name}" not found. Use list_datasets to see available datasets.`);
|
|
398
|
+
}
|
|
399
|
+
const hostRoot = resolveDatasetPath(ds);
|
|
400
|
+
if (!(0, fs_1.existsSync)(hostRoot)) {
|
|
401
|
+
return asError(`Dataset path does not exist: ${hostRoot}`);
|
|
402
|
+
}
|
|
403
|
+
const safeMax = clampInt(max_results, 50, 1, 500);
|
|
404
|
+
const allFiles = walkDir(hostRoot, 10);
|
|
405
|
+
const matches = allFiles
|
|
406
|
+
.filter((f) => !f.isDir && simpleMatch(pattern, (0, path_1.basename)(f.path)))
|
|
407
|
+
.slice(0, safeMax)
|
|
408
|
+
.map((f) => ({
|
|
409
|
+
name: (0, path_1.basename)(f.path),
|
|
410
|
+
relative_path: f.path.slice(hostRoot.length + 1),
|
|
411
|
+
container_path: `/datasets/${ds.name}/${f.path.slice(hostRoot.length + 1)}`,
|
|
412
|
+
size: formatBytes(f.size),
|
|
413
|
+
size_bytes: f.size,
|
|
414
|
+
}));
|
|
415
|
+
if (matches.length === 0) {
|
|
416
|
+
return asText(`No files matching "${pattern}" found in dataset "${name}".`);
|
|
417
|
+
}
|
|
418
|
+
return asJson({
|
|
419
|
+
dataset: name,
|
|
420
|
+
pattern,
|
|
421
|
+
match_count: matches.length,
|
|
422
|
+
matches,
|
|
423
|
+
});
|
|
424
|
+
});
|
|
425
|
+
// ── Tool: get_dataset_summary ────────────────────────────
|
|
426
|
+
server.registerTool('get_dataset_summary', {
|
|
427
|
+
title: 'Dataset Summary Statistics',
|
|
428
|
+
description: 'Get summary statistics for a dataset: total size, file count, breakdown by ' +
|
|
429
|
+
'file extension, largest files, and directory structure overview.',
|
|
430
|
+
inputSchema: {
|
|
431
|
+
name: zod_1.z.string().describe('Dataset name'),
|
|
432
|
+
},
|
|
433
|
+
}, async ({ name }) => {
|
|
434
|
+
const datasets = getDatasets();
|
|
435
|
+
const ds = datasets.find((d) => d.name === name);
|
|
436
|
+
if (!ds) {
|
|
437
|
+
return asError(`Dataset "${name}" not found. Use list_datasets to see available datasets.`);
|
|
438
|
+
}
|
|
439
|
+
const hostRoot = resolveDatasetPath(ds);
|
|
440
|
+
if (!(0, fs_1.existsSync)(hostRoot)) {
|
|
441
|
+
return asError(`Dataset path does not exist: ${hostRoot}`);
|
|
442
|
+
}
|
|
443
|
+
const allEntries = walkDir(hostRoot, 10);
|
|
444
|
+
const files = allEntries.filter((e) => !e.isDir);
|
|
445
|
+
const dirs = allEntries.filter((e) => e.isDir);
|
|
446
|
+
const totalSize = files.reduce((sum, f) => sum + f.size, 0);
|
|
447
|
+
// Breakdown by extension
|
|
448
|
+
const extCounts = {};
|
|
449
|
+
for (const f of files) {
|
|
450
|
+
const ext = (0, path_1.extname)(f.path).toLowerCase() || '(no extension)';
|
|
451
|
+
if (!extCounts[ext])
|
|
452
|
+
extCounts[ext] = { count: 0, size: 0 };
|
|
453
|
+
extCounts[ext].count++;
|
|
454
|
+
extCounts[ext].size += f.size;
|
|
455
|
+
}
|
|
456
|
+
const byExtension = Object.entries(extCounts)
|
|
457
|
+
.sort((a, b) => b[1].size - a[1].size)
|
|
458
|
+
.slice(0, 20)
|
|
459
|
+
.map(([ext, info]) => ({
|
|
460
|
+
extension: ext,
|
|
461
|
+
count: info.count,
|
|
462
|
+
total_size: formatBytes(info.size),
|
|
463
|
+
}));
|
|
464
|
+
// Top 10 largest files
|
|
465
|
+
const largestFiles = files
|
|
466
|
+
.sort((a, b) => b.size - a.size)
|
|
467
|
+
.slice(0, 10)
|
|
468
|
+
.map((f) => ({
|
|
469
|
+
name: (0, path_1.basename)(f.path),
|
|
470
|
+
relative_path: f.path.slice(hostRoot.length + 1),
|
|
471
|
+
size: formatBytes(f.size),
|
|
472
|
+
}));
|
|
473
|
+
// Top-level directory names
|
|
474
|
+
const topLevelDirs = dirs
|
|
475
|
+
.filter((d) => {
|
|
476
|
+
const rel = d.path.slice(hostRoot.length + 1);
|
|
477
|
+
return !rel.includes('/') && !rel.includes('\\');
|
|
478
|
+
})
|
|
479
|
+
.map((d) => (0, path_1.basename)(d.path))
|
|
480
|
+
.sort();
|
|
481
|
+
return asJson({
|
|
482
|
+
dataset: name,
|
|
483
|
+
container_path: `/datasets/${name}`,
|
|
484
|
+
host_path: hostRoot,
|
|
485
|
+
mode: ds.mode,
|
|
486
|
+
description: ds.description || null,
|
|
487
|
+
total_files: files.length,
|
|
488
|
+
total_directories: dirs.length,
|
|
489
|
+
total_size: formatBytes(totalSize),
|
|
490
|
+
total_size_bytes: totalSize,
|
|
491
|
+
by_extension: byExtension,
|
|
492
|
+
largest_files: largestFiles,
|
|
493
|
+
top_level_directories: topLevelDirs,
|
|
494
|
+
});
|
|
495
|
+
});
|
|
496
|
+
// ── Tool: read_dataset_file ──────────────────────────────
|
|
497
|
+
server.registerTool('read_dataset_file', {
|
|
498
|
+
title: 'Read Dataset File',
|
|
499
|
+
description: 'Read the contents of a text file within a dataset (last N lines). ' +
|
|
500
|
+
'Useful for previewing CSV headers, README files, config files, etc.',
|
|
501
|
+
inputSchema: {
|
|
502
|
+
name: zod_1.z.string().describe('Dataset name'),
|
|
503
|
+
path: zod_1.z.string().describe('Relative path to the file within the dataset'),
|
|
504
|
+
tail_lines: zod_1.z.number().int().min(1).max(5000).optional().default(50).describe('Number of lines to read from the end of the file'),
|
|
505
|
+
},
|
|
506
|
+
}, async ({ name, path: relPath, tail_lines }) => {
|
|
507
|
+
const datasets = getDatasets();
|
|
508
|
+
const ds = datasets.find((d) => d.name === name);
|
|
509
|
+
if (!ds) {
|
|
510
|
+
return asError(`Dataset "${name}" not found. Use list_datasets to see available datasets.`);
|
|
511
|
+
}
|
|
512
|
+
const hostRoot = resolveDatasetPath(ds);
|
|
513
|
+
const filePath = resolvePathWithinRoot(hostRoot, relPath);
|
|
514
|
+
if (!filePath) {
|
|
515
|
+
return asError('Path traversal is not allowed.');
|
|
516
|
+
}
|
|
517
|
+
if (!(0, fs_1.existsSync)(filePath)) {
|
|
518
|
+
return asError(`File not found: ${relPath}`);
|
|
519
|
+
}
|
|
520
|
+
const st = (0, fs_1.statSync)(filePath);
|
|
521
|
+
if (st.isDirectory()) {
|
|
522
|
+
return asError(`"${relPath}" is a directory. Use inspect_dataset to browse directories.`);
|
|
523
|
+
}
|
|
524
|
+
// Warn for likely binary files
|
|
525
|
+
const binaryExts = new Set([
|
|
526
|
+
'.gz', '.bz2', '.xz', '.zip', '.tar', '.bam', '.cram',
|
|
527
|
+
'.sra', '.hdf5', '.h5', '.png', '.jpg', '.jpeg', '.gif',
|
|
528
|
+
'.pdf', '.doc', '.docx', '.xls', '.xlsx', '.pptx',
|
|
529
|
+
'.so', '.o', '.a', '.exe', '.dll', '.pyc',
|
|
530
|
+
]);
|
|
531
|
+
const ext = (0, path_1.extname)(filePath).toLowerCase();
|
|
532
|
+
if (binaryExts.has(ext)) {
|
|
533
|
+
return asText(`File "${relPath}" appears to be a binary file (${ext}). ` +
|
|
534
|
+
`Size: ${formatBytes(st.size)}. Use inspect_dataset or get_dataset_summary instead.`);
|
|
535
|
+
}
|
|
536
|
+
const safeTailLines = clampInt(tail_lines, 50, 1, 5000);
|
|
537
|
+
const content = tailReadFile(filePath, safeTailLines);
|
|
538
|
+
if (!content) {
|
|
539
|
+
return asText(`File "${relPath}" is empty (${formatBytes(st.size)}).`);
|
|
540
|
+
}
|
|
541
|
+
return asText(`=== ${relPath} (last ${safeTailLines} lines, size: ${formatBytes(st.size)}) ===\n${content}`);
|
|
542
|
+
});
|
|
543
|
+
// ── Tool: validate_dataset ───────────────────────────────
|
|
544
|
+
server.registerTool('validate_dataset', {
|
|
545
|
+
title: 'Validate Dataset Registration',
|
|
546
|
+
description: 'Validate that a host dataset path and name are usable before registering. ' +
|
|
547
|
+
'Checks existence, directory type, permissions, and name/path conflicts.',
|
|
548
|
+
inputSchema: {
|
|
549
|
+
path: zod_1.z.string().describe('Absolute host path to the dataset directory'),
|
|
550
|
+
name: zod_1.z.string().optional().describe('Proposed dataset name (optional, auto-derived from path)'),
|
|
551
|
+
mode: zod_1.z.enum(['ro', 'rw']).optional().default('ro').describe('Requested mount mode'),
|
|
552
|
+
},
|
|
553
|
+
}, async ({ path: dsPath, name, mode }) => {
|
|
554
|
+
const parsedPath = parseAbsolutePath(dsPath);
|
|
555
|
+
const requestedMode = mode || 'ro';
|
|
556
|
+
if ('error' in parsedPath) {
|
|
557
|
+
return asError(parsedPath.error);
|
|
558
|
+
}
|
|
559
|
+
const resolvedPath = parsedPath.resolved;
|
|
560
|
+
const proposedName = (name?.trim() ? name.trim() : datasetNameFromPath(resolvedPath)).replace(/[^a-zA-Z0-9._-]/g, '-');
|
|
561
|
+
const loaded = readRawConfig();
|
|
562
|
+
if ('error' in loaded)
|
|
563
|
+
return asError(loaded.error);
|
|
564
|
+
const exists = (0, fs_1.existsSync)(resolvedPath);
|
|
565
|
+
const isDirectory = exists ? (0, fs_1.statSync)(resolvedPath).isDirectory() : false;
|
|
566
|
+
const readable = exists ? canAccess(resolvedPath, fs_1.constants.R_OK) : false;
|
|
567
|
+
const writable = exists ? canAccess(resolvedPath, fs_1.constants.W_OK) : false;
|
|
568
|
+
const requiresWrite = requestedMode === 'rw';
|
|
569
|
+
const nameValid = validDatasetName(proposedName);
|
|
570
|
+
const nameConflict = findDatasetNameConflict(loaded.rawConfig, proposedName);
|
|
571
|
+
const pathConflict = findDatasetPathConflict(loaded.rawConfig, resolvedPath);
|
|
572
|
+
const valid = (exists &&
|
|
573
|
+
isDirectory &&
|
|
574
|
+
readable &&
|
|
575
|
+
(!requiresWrite || writable) &&
|
|
576
|
+
nameValid &&
|
|
577
|
+
!nameConflict &&
|
|
578
|
+
!pathConflict);
|
|
579
|
+
let sampleEntries = 0;
|
|
580
|
+
let sampleFiles = 0;
|
|
581
|
+
let sampleSizeBytes = 0;
|
|
582
|
+
if (exists && isDirectory && readable) {
|
|
583
|
+
const walked = walkDir(resolvedPath, 1);
|
|
584
|
+
sampleEntries = walked.length;
|
|
585
|
+
const files = walked.filter((e) => !e.isDir);
|
|
586
|
+
sampleFiles = files.length;
|
|
587
|
+
sampleSizeBytes = files.reduce((sum, f) => sum + f.size, 0);
|
|
588
|
+
}
|
|
589
|
+
return asJson({
|
|
590
|
+
valid,
|
|
591
|
+
path_input: dsPath,
|
|
592
|
+
resolved_path: resolvedPath,
|
|
593
|
+
requested_name: proposedName,
|
|
594
|
+
name_valid: nameValid,
|
|
595
|
+
mode_requested: requestedMode,
|
|
596
|
+
required_access: requiresWrite ? 'read/write' : 'read',
|
|
597
|
+
exists,
|
|
598
|
+
is_directory: isDirectory,
|
|
599
|
+
readable,
|
|
600
|
+
writable,
|
|
601
|
+
name_conflict: nameConflict ? registeredSummary(nameConflict) : null,
|
|
602
|
+
path_conflict: pathConflict ? registeredSummary(pathConflict) : null,
|
|
603
|
+
container_mount: `/datasets/${proposedName}`,
|
|
604
|
+
sample_entries_depth_1: sampleEntries,
|
|
605
|
+
sample_files_depth_1: sampleFiles,
|
|
606
|
+
sample_size_depth_1: formatBytes(sampleSizeBytes),
|
|
607
|
+
sample_size_bytes_depth_1: sampleSizeBytes,
|
|
608
|
+
});
|
|
609
|
+
});
|
|
610
|
+
// ── Tool: register_dataset ────────────────────────────
|
|
611
|
+
server.registerTool('register_dataset', {
|
|
612
|
+
title: 'Register Dataset',
|
|
613
|
+
description: 'Register a new dataset by adding a host directory to the LabGate config. ' +
|
|
614
|
+
'The dataset will be mounted at /datasets/{name} in the next container session. ' +
|
|
615
|
+
'Name is auto-derived from the directory basename if not provided.',
|
|
616
|
+
inputSchema: {
|
|
617
|
+
path: zod_1.z.string().describe('Absolute host path to the dataset directory'),
|
|
618
|
+
name: zod_1.z.string().optional().describe('Name for the dataset (used as mount point). Auto-derived from path if omitted.'),
|
|
619
|
+
mode: zod_1.z.enum(['ro', 'rw']).optional().default('ro').describe('Access mode: ro (read-only, default) or rw (read-write)'),
|
|
620
|
+
description: zod_1.z.string().optional().describe('Brief description of the dataset contents'),
|
|
621
|
+
},
|
|
622
|
+
}, async ({ path: dsPath, name, mode, description }) => {
|
|
623
|
+
const parsedPath = parseAbsolutePath(dsPath);
|
|
624
|
+
if ('error' in parsedPath)
|
|
625
|
+
return asError(parsedPath.error);
|
|
626
|
+
const resolvedPath = parsedPath.resolved;
|
|
627
|
+
const dirCheck = ensureDirectory(resolvedPath);
|
|
628
|
+
if ('error' in dirCheck)
|
|
629
|
+
return asError(dirCheck.error);
|
|
630
|
+
const dsName = (name?.trim() ? name.trim() : datasetNameFromPath(resolvedPath)).replace(/[^a-zA-Z0-9._-]/g, '-');
|
|
631
|
+
if (!validDatasetName(dsName)) {
|
|
632
|
+
return asError(`Invalid dataset name: "${dsName}". Use alphanumerics, hyphens, dots, underscores. Must start with alphanumeric.`);
|
|
633
|
+
}
|
|
634
|
+
const requestedMode = mode || 'ro';
|
|
635
|
+
if (!canAccess(resolvedPath, fs_1.constants.R_OK)) {
|
|
636
|
+
return asError(`Path is not readable: ${resolvedPath}`);
|
|
637
|
+
}
|
|
638
|
+
if (requestedMode === 'rw' && !canAccess(resolvedPath, fs_1.constants.W_OK)) {
|
|
639
|
+
return asError(`Path is not writable for rw mode: ${resolvedPath}`);
|
|
640
|
+
}
|
|
641
|
+
const loaded = readRawConfig();
|
|
642
|
+
if ('error' in loaded)
|
|
643
|
+
return asError(loaded.error);
|
|
644
|
+
const existingByName = findDatasetNameConflict(loaded.rawConfig, dsName);
|
|
645
|
+
if (existingByName) {
|
|
646
|
+
return asError(`Dataset name already exists: ${registeredSummary(existingByName)}`);
|
|
647
|
+
}
|
|
648
|
+
const existingByPath = findDatasetPathConflict(loaded.rawConfig, resolvedPath);
|
|
649
|
+
if (existingByPath) {
|
|
650
|
+
return asError(`Dataset path already exists: ${registeredSummary(existingByPath)}`);
|
|
651
|
+
}
|
|
652
|
+
const newDs = { path: dsPath, name: dsName, mode: requestedMode };
|
|
653
|
+
const desc = description?.trim();
|
|
654
|
+
if (desc)
|
|
655
|
+
newDs.description = desc;
|
|
656
|
+
loaded.rawConfig.datasets.push(newDs);
|
|
657
|
+
const writeResult = writeRawConfig(loaded.configPath, loaded.rawConfig);
|
|
658
|
+
if ('error' in writeResult)
|
|
659
|
+
return asError(writeResult.error);
|
|
660
|
+
return asText(`Dataset "${dsName}" registered.\n` +
|
|
661
|
+
` Path: ${dsPath}\n` +
|
|
662
|
+
` Mode: ${newDs.mode}\n` +
|
|
663
|
+
` Container mount: /datasets/${dsName}\n` +
|
|
664
|
+
(desc ? ` Description: ${desc}\n` : '') +
|
|
665
|
+
`\nNote: The dataset will be mounted in the next container session (restart required).`);
|
|
666
|
+
});
|
|
667
|
+
// ── Tool: update_dataset ────────────────────────────────
// Mutates a single entry of the `datasets` array in the raw LabGate config.
// All validation runs before anything is written, so a failed update leaves
// the config file untouched. Changes only apply at the next session restart.
server.registerTool('update_dataset', {
    title: 'Update Dataset',
    description: 'Update an existing dataset registration (name, path, mode, description). ' +
        'Changes apply on next session restart.',
    inputSchema: {
        name: zod_1.z.string().describe('Current dataset name to update'),
        new_name: zod_1.z.string().optional().describe('New dataset name'),
        path: zod_1.z.string().optional().describe('New absolute host path'),
        mode: zod_1.z.enum(['ro', 'rw']).optional().describe('New mode'),
        description: zod_1.z.string().optional().describe('New description (empty string clears)'),
        clear_description: zod_1.z.boolean().optional().default(false).describe('Clear description explicitly'),
    },
}, async ({ name, new_name, path: nextPath, mode: nextMode, description: nextDescription, clear_description }) => {
    if (!name || !name.trim())
        return asError('Dataset name is required.');
    // Reject a call with no update fields before touching the config file.
    const requestedMutation = new_name !== undefined ||
        nextPath !== undefined ||
        nextMode !== undefined ||
        nextDescription !== undefined ||
        clear_description;
    if (!requestedMutation)
        return asError('No update fields provided.');
    const loaded = readRawConfig();
    if ('error' in loaded)
        return asError(loaded.error);
    const idx = findDatasetIndex(loaded.rawConfig, name.trim());
    if (idx < 0) {
        return asError(`Dataset "${name}" not found. Use list_datasets to see registered datasets.`);
    }
    const current = loaded.rawConfig.datasets[idx];
    // Snapshot the pre-update fields for the change summary and no-op check.
    const before = {
        name: current.name,
        path: current.path,
        mode: current.mode,
        description: current.description,
    };
    // `updatedPath` keeps the user-supplied (possibly `~`-prefixed) form as
    // stored in the config; `updatedPathResolved` is the absolute form used
    // for access and conflict checks.
    let updatedPath = current.path;
    let updatedPathResolved = (0, path_1.resolve)(String(current.path).replace(/^~/, (0, os_1.homedir)()));
    if (nextPath !== undefined) {
        const parsed = parseAbsolutePath(nextPath);
        if ('error' in parsed)
            return asError(parsed.error);
        const dirCheck = ensureDirectory(parsed.resolved);
        if ('error' in dirCheck)
            return asError(dirCheck.error);
        updatedPath = nextPath;
        updatedPathResolved = (0, path_1.resolve)(parsed.resolved);
    }
    // Fall back to the current name when new_name is missing/blank, then
    // sanitize to the allowed charset (same normalization as registration).
    const updatedName = (new_name?.trim() ? new_name.trim() : current.name).replace(/[^a-zA-Z0-9._-]/g, '-');
    if (!validDatasetName(updatedName)) {
        return asError(`Invalid dataset name: "${updatedName}". Use alphanumerics, hyphens, dots, underscores. Must start with alphanumeric.`);
    }
    const updatedMode = nextMode || current.mode || 'ro';
    // Access checks run against the effective (possibly unchanged) path.
    if (!canAccess(updatedPathResolved, fs_1.constants.R_OK)) {
        return asError(`Path is not readable: ${updatedPathResolved}`);
    }
    if (updatedMode === 'rw' && !canAccess(updatedPathResolved, fs_1.constants.W_OK)) {
        return asError(`Path is not writable for rw mode: ${updatedPathResolved}`);
    }
    // Conflict checks pass `idx` so the entry being updated is excluded.
    const nameConflict = findDatasetNameConflict(loaded.rawConfig, updatedName, idx);
    if (nameConflict) {
        return asError(`Dataset name already exists: ${registeredSummary(nameConflict)}`);
    }
    const pathConflict = findDatasetPathConflict(loaded.rawConfig, updatedPathResolved, idx);
    if (pathConflict) {
        return asError(`Dataset path already exists: ${registeredSummary(pathConflict)}`);
    }
    // clear_description takes precedence over a supplied description; an
    // empty/whitespace-only description also clears it.
    let updatedDescription = current.description;
    if (clear_description) {
        updatedDescription = undefined;
    }
    else if (nextDescription !== undefined) {
        const trimmed = nextDescription.trim();
        updatedDescription = trimmed || undefined;
    }
    const next = {
        name: updatedName,
        path: updatedPath,
        mode: updatedMode,
    };
    if (updatedDescription)
        next.description = updatedDescription;
    // Detect effective no-ops (e.g. new_name equal to the current name) so
    // the config file is not rewritten needlessly.
    const changed = before.name !== next.name ||
        before.path !== next.path ||
        before.mode !== next.mode ||
        (before.description || undefined) !== (next.description || undefined);
    if (!changed) {
        return asText(`No changes for dataset "${name}".`);
    }
    loaded.rawConfig.datasets[idx] = next;
    const writeResult = writeRawConfig(loaded.configPath, loaded.rawConfig);
    if ('error' in writeResult)
        return asError(writeResult.error);
    return asText(`Dataset "${before.name}" updated.\n` +
        ` Before: ${before.path} (${before.mode})\n` +
        ` After: ${next.path} (${next.mode}) as "${next.name}"\n` +
        ` Description: ${next.description || '(none)'}\n` +
        `\nNote: Changes take effect in the next container session (restart required).`);
});
|
|
767
|
+
// ── Tool: unregister_dataset ─────────────────────────
// Deletes one entry from the `datasets` array in the raw LabGate config and
// persists the result; the container mount disappears on the next session.
server.registerTool('unregister_dataset', {
    title: 'Unregister Dataset',
    description: 'Remove a dataset from the LabGate config. The mount will be removed on the next container session.',
    inputSchema: {
        name: zod_1.z.string().describe('Name of the dataset to remove'),
    },
}, async ({ name }) => {
    // An empty or whitespace-only name cannot identify a dataset.
    const trimmedName = name?.trim();
    if (!trimmedName) {
        return asError('Dataset name is required.');
    }
    const configState = readRawConfig();
    if ('error' in configState) {
        return asError(configState.error);
    }
    const targetIndex = findDatasetIndex(configState.rawConfig, trimmedName);
    if (targetIndex < 0) {
        return asError(`Dataset "${name}" not found. Use list_datasets to see registered datasets.`);
    }
    // Remove the entry in place, then persist the modified config.
    const [removedEntry] = configState.rawConfig.datasets.splice(targetIndex, 1);
    const persisted = writeRawConfig(configState.configPath, configState.rawConfig);
    if ('error' in persisted) {
        return asError(persisted.error);
    }
    return asText(`Dataset "${removedEntry.name}" removed.\n` +
        ` Was: ${removedEntry.path} (${removedEntry.mode})\n` +
        `\nNote: The mount will be removed on the next container session (restart required).`);
});
|
|
793
|
+
// ── Connect and start ──────────────────────────────────
|
|
794
|
+
const transport = new stdio_js_1.StdioServerTransport();
|
|
795
|
+
await server.connect(transport);
|
|
796
|
+
process.on('SIGINT', () => process.exit(0));
|
|
797
|
+
process.on('SIGTERM', () => process.exit(0));
|
|
798
|
+
}
|
|
799
|
+
// True when this module was launched as the executed script
// (node .../dataset-mcp, dataset-mcp.js, dataset-mcp.mjs, or the
// ".bundle" variants) rather than loaded by another entry point.
function isDirectRun() {
    const scriptPath = process.argv[1] ?? '';
    // The basename must be "dataset-mcp" with optional ".bundle" and an
    // optional ".js"/".mjs" extension, preceded by start-of-string or a
    // forward/back slash path separator.
    return /(?:^|\/|\\)dataset-mcp(?:\.bundle)?(?:\.(?:js|mjs))?$/.test(scriptPath);
}
|
|
803
|
+
// Start the MCP server only when this script was invoked directly; a
// rejected main() is reported on stderr and mapped to a non-zero exit.
if (isDirectRun()) {
    const onFatal = (err) => {
        console.error('Dataset MCP server error:', err);
        process.exit(1);
    };
    main().catch(onFatal);
}
|
|
809
|
+
//# sourceMappingURL=dataset-mcp.js.map
|