postgres-scout-mcp 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +190 -0
- package/README.md +234 -0
- package/bin/cli.js +67 -0
- package/dist/config/environment.js +52 -0
- package/dist/index.js +59 -0
- package/dist/server/setup.js +122 -0
- package/dist/tools/data-quality.js +442 -0
- package/dist/tools/database.js +148 -0
- package/dist/tools/export.js +223 -0
- package/dist/tools/index.js +52 -0
- package/dist/tools/live-monitoring.js +369 -0
- package/dist/tools/maintenance.js +617 -0
- package/dist/tools/monitoring.js +286 -0
- package/dist/tools/mutations.js +410 -0
- package/dist/tools/optimization.js +1094 -0
- package/dist/tools/query.js +138 -0
- package/dist/tools/relationships.js +261 -0
- package/dist/tools/schema.js +253 -0
- package/dist/tools/temporal.js +313 -0
- package/dist/types.js +2 -0
- package/dist/utils/database.js +123 -0
- package/dist/utils/logger.js +73 -0
- package/dist/utils/query-builder.js +180 -0
- package/dist/utils/rate-limiter.js +39 -0
- package/dist/utils/result-formatter.js +42 -0
- package/dist/utils/sanitize.js +525 -0
- package/dist/utils/zod-to-json-schema.js +85 -0
- package/package.json +58 -0
|
@@ -0,0 +1,148 @@
|
|
|
1
|
+
import { z } from 'zod';
|
|
2
|
+
import { ensureDatabaseExists, executeInternalQuery, getCurrentDatabaseName } from '../utils/database.js';
|
|
3
|
+
// Input schema for the listDatabases tool: no arguments are accepted.
const ListDatabasesSchema = z.object({});
// Input schema for the getDatabaseStats tool: optionally names the target
// database (the handler only validates it; stats come from the current DB).
const GetDatabaseStatsSchema = z.object({
    database: z.string().optional()
});
|
|
7
|
+
/**
 * List all non-template databases with owner, encoding, size, and the
 * number of backends currently connected to each.
 *
 * Fix: pg_database_size() raises an error for any database the current
 * role lacks CONNECT privilege on, which previously aborted the whole
 * listing. The size is now computed only when the privilege is held;
 * otherwise sizeBytes/sizeMB are null.
 *
 * @param {object} connection - database connection handle
 * @param {object} logger - structured logger
 * @param {object} args - validated arguments (schema is empty; unused)
 * @returns {Promise<{databases: Array<object>}>}
 */
export async function listDatabases(connection, logger, args) {
    logger.info('listDatabases', 'Listing all databases');
    const query = `
    SELECT
      d.datname as name,
      pg_catalog.pg_get_userbyid(d.datdba) as owner,
      pg_catalog.pg_encoding_to_char(d.encoding) as encoding,
      CASE
        WHEN pg_catalog.has_database_privilege(d.datname, 'CONNECT')
        THEN pg_catalog.pg_database_size(d.datname)
        ELSE NULL
      END as size_bytes,
      (SELECT COUNT(*) FROM pg_catalog.pg_stat_activity WHERE datname = d.datname) as connections
    FROM pg_catalog.pg_database d
    WHERE d.datistemplate = false
    ORDER BY d.datname;
  `;
    const result = await executeInternalQuery(connection, logger, { query });
    return {
        databases: result.rows.map(row => {
            // size_bytes is NULL when the role cannot connect to the database.
            const sizeBytes = row.size_bytes === null || row.size_bytes === undefined
                ? null
                : parseInt(row.size_bytes, 10);
            return {
                name: row.name,
                owner: row.owner,
                encoding: row.encoding,
                sizeBytes,
                sizeMB: sizeBytes === null ? null : (sizeBytes / 1024 / 1024).toFixed(2),
                connections: parseInt(row.connections, 10)
            };
        })
    };
}
|
|
32
|
+
/**
 * Collect high-level statistics for the currently connected database:
 * size, object counts, connection usage, cache hit ratio and cumulative
 * tuple activity.
 *
 * If args.database names a database other than the one currently
 * connected, the database's existence is verified and an error is thrown
 * instructing the caller to reconnect — cross-database stats cannot be
 * fetched over a single connection.
 *
 * @param {object} connection - database connection handle
 * @param {object} logger - structured logger
 * @param {{database?: string}} args - validated arguments
 * @returns {Promise<object>} aggregated statistics object
 */
export async function getDatabaseStats(connection, logger, args) {
    const currentDatabase = await getCurrentDatabaseName(connection, logger);
    const requestedDatabase = args.database;
    if (requestedDatabase) {
        await ensureDatabaseExists(connection, logger, requestedDatabase);
        if (requestedDatabase !== currentDatabase) {
            throw new Error(`Connected to "${currentDatabase}". Reconnect to "${requestedDatabase}" to fetch its stats.`);
        }
    }
    logger.info('getDatabaseStats', 'Getting database statistics', { database: currentDatabase });
    // The five stat queries are independent, so run them concurrently.
    const [sizeStats, objectCounts, connectionStats, cacheStats, tupleStats] = await Promise.all([
        getSizeStats(connection, logger, currentDatabase),
        getObjectCounts(connection, logger),
        getConnectionStats(connection, logger),
        getCacheStats(connection, logger),
        getTupleStats(connection, logger)
    ]);
    return {
        database: currentDatabase,
        size: sizeStats.size,
        tables: objectCounts.tables,
        indexes: objectCounts.indexes,
        sequences: objectCounts.sequences,
        views: objectCounts.views,
        functions: objectCounts.functions,
        activeConnections: connectionStats.active,
        maxConnections: connectionStats.max,
        cacheHitRatio: cacheStats.ratio,
        transactionRate: cacheStats.transactionRate,
        tupleStats: tupleStats
    };
}
|
|
65
|
+
// Pretty-printed total size of the current database.
// NOTE(review): the `database` argument is accepted but unused — the query
// always measures current_database(); confirm before relying on it.
async function getSizeStats(connection, logger, database) {
    const sizeQuery = `SELECT pg_size_pretty(pg_database_size(current_database())) as size`;
    const { rows } = await executeInternalQuery(connection, logger, { query: sizeQuery });
    return { size: rows[0]?.size || '0' };
}
|
|
70
|
+
/**
 * Count user-defined objects (tables, indexes, sequences, views,
 * functions) in the current database.
 *
 * Fix: pg_class and pg_proc span every schema, so the previous unfiltered
 * counts included PostgreSQL's own catalog tables, TOAST tables and
 * thousands of built-in functions, badly inflating the reported numbers.
 * System schemas are now excluded.
 *
 * @returns {Promise<{tables:number,indexes:number,sequences:number,views:number,functions:number}>}
 */
async function getObjectCounts(connection, logger) {
    const query = `
    WITH user_ns AS (
      SELECT oid FROM pg_catalog.pg_namespace
      WHERE nspname NOT IN ('pg_catalog', 'information_schema')
        AND nspname NOT LIKE 'pg_toast%'
    )
    SELECT
      (SELECT COUNT(*) FROM pg_catalog.pg_class WHERE relkind = 'r' AND relnamespace IN (SELECT oid FROM user_ns)) as tables,
      (SELECT COUNT(*) FROM pg_catalog.pg_class WHERE relkind = 'i' AND relnamespace IN (SELECT oid FROM user_ns)) as indexes,
      (SELECT COUNT(*) FROM pg_catalog.pg_class WHERE relkind = 'S' AND relnamespace IN (SELECT oid FROM user_ns)) as sequences,
      (SELECT COUNT(*) FROM pg_catalog.pg_class WHERE relkind = 'v' AND relnamespace IN (SELECT oid FROM user_ns)) as views,
      (SELECT COUNT(*) FROM pg_catalog.pg_proc WHERE pronamespace IN (SELECT oid FROM user_ns)) as functions
  `;
    const result = await executeInternalQuery(connection, logger, { query });
    return {
        tables: parseInt(result.rows[0]?.tables || '0', 10),
        indexes: parseInt(result.rows[0]?.indexes || '0', 10),
        sequences: parseInt(result.rows[0]?.sequences || '0', 10),
        views: parseInt(result.rows[0]?.views || '0', 10),
        functions: parseInt(result.rows[0]?.functions || '0', 10)
    };
}
|
|
88
|
+
// Number of active backends vs. the server's configured connection ceiling.
// `max` falls back to PostgreSQL's default of 100 if the setting is missing.
async function getConnectionStats(connection, logger) {
    const query = `
    SELECT
      (SELECT COUNT(*) FROM pg_catalog.pg_stat_activity) as active,
      (SELECT setting::int FROM pg_catalog.pg_settings WHERE name = 'max_connections') as max
  `;
    const result = await executeInternalQuery(connection, logger, { query });
    const row = result.rows[0] || {};
    return {
        active: parseInt(row.active || '0', 10),
        max: parseInt(row.max || '100', 10)
    };
}
|
|
100
|
+
// Buffer-cache hit ratio (0..1, 4 decimal places) and transaction totals
// for the current database, taken from pg_stat_database.
// NOTE(review): `transactionRate` is actually the cumulative commit+rollback
// count since the last stats reset, not a per-second rate — confirm callers.
async function getCacheStats(connection, logger) {
    const query = `
    SELECT
      CASE
        WHEN (blks_hit + blks_read) = 0 THEN 0
        ELSE ROUND(blks_hit::numeric / (blks_hit + blks_read), 4)
      END as cache_hit_ratio,
      xact_commit + xact_rollback as total_transactions
    FROM pg_catalog.pg_stat_database
    WHERE datname = current_database()
  `;
    const statsResult = await executeInternalQuery(connection, logger, { query });
    const row = statsResult.rows[0] || {};
    return {
        ratio: parseFloat(row.cache_hit_ratio || '0'),
        transactionRate: parseInt(row.total_transactions || '0', 10)
    };
}
|
|
117
|
+
// Cumulative tuple-activity counters for the current database
// (totals since the last stats reset, not rates).
async function getTupleStats(connection, logger) {
    const query = `
    SELECT
      tup_returned,
      tup_fetched,
      tup_inserted,
      tup_updated,
      tup_deleted
    FROM pg_catalog.pg_stat_database
    WHERE datname = current_database()
  `;
    const result = await executeInternalQuery(connection, logger, { query });
    const stats = result.rows[0] || {};
    const toInt = (v) => parseInt(v || '0', 10);
    return {
        returned: toInt(stats.tup_returned),
        fetched: toInt(stats.tup_fetched),
        inserted: toInt(stats.tup_inserted),
        updated: toInt(stats.tup_updated),
        deleted: toInt(stats.tup_deleted)
    };
}
|
|
138
|
+
// Registry of database-level tools, keyed by tool name.
// Each entry pairs a zod input schema with its handler function;
// consumed by the aggregate registry in tools/index.js.
export const databaseTools = {
    listDatabases: {
        schema: ListDatabasesSchema,
        handler: listDatabases
    },
    getDatabaseStats: {
        schema: GetDatabaseStatsSchema,
        handler: getDatabaseStats
    }
};
//# sourceMappingURL=database.js.map
|
|
@@ -0,0 +1,223 @@
|
|
|
1
|
+
import { z } from 'zod';
|
|
2
|
+
import { executeInternalQuery } from '../utils/database.js';
|
|
3
|
+
import { escapeIdentifier, sanitizeIdentifier } from '../utils/sanitize.js';
|
|
4
|
+
import { WhereConditionSchema, buildWhereClause } from '../utils/query-builder.js';
|
|
5
|
+
// Input schema for the exportTable tool.
const ExportTableSchema = z.object({
    table: z.string(),
    // Accept any casing for the format name and normalise to lowercase
    // before validating against the supported set.
    format: z.preprocess((val) => typeof val === 'string' ? val.toLowerCase() : val, z.enum(['csv', 'json', 'jsonl', 'sql'])),
    schema: z.string().optional().default('public'),
    where: z.array(WhereConditionSchema).optional(),
    columns: z.array(z.string()).optional(),
    limit: z.number().optional().default(10000),
    includeHeaders: z.boolean().optional().default(true)
});
// Input schema for the generateInsertStatements tool.
const GenerateInsertStatementsSchema = z.object({
    table: z.string(),
    schema: z.string().optional().default('public'),
    where: z.array(WhereConditionSchema).optional(),
    batchSize: z.number().optional().default(100),
    includeSchema: z.boolean().optional().default(true),
    limit: z.number().optional().default(1000)
});
|
|
22
|
+
/**
 * Export (a subset of) a table's rows in csv, json, jsonl or sql format.
 *
 * Identifiers are sanitized and escaped before interpolation into the
 * query; the row cap and WHERE values travel as bind parameters ($1 is
 * always the LIMIT).
 *
 * @param {object} connection - database connection handle
 * @param {object} logger - structured logger
 * @param {object} args - validated ExportTableSchema arguments
 * @returns {Promise<object>} export metadata, a short preview and the full data
 */
export async function exportTable(connection, logger, args) {
    const { table, format, schema, where, columns, limit, includeHeaders } = args;
    logger.info('exportTable', 'Exporting table data', { table, format });
    const safeSchema = sanitizeIdentifier(schema);
    const safeTable = sanitizeIdentifier(table);
    const selectList = (columns && columns.length > 0)
        ? columns.map(sanitizeIdentifier).map(escapeIdentifier).join(', ')
        : '*';
    let whereSql = '';
    let params = [limit];
    if (where && where.length > 0) {
        const built = buildWhereClause(where, 2); // $1 is limit
        whereSql = `WHERE ${built.clause}`;
        params = [limit, ...built.params];
    }
    const query = `
    SELECT ${selectList}
    FROM ${escapeIdentifier(safeSchema)}.${escapeIdentifier(safeTable)}
    ${whereSql}
    LIMIT $1
  `;
    const startedAt = Date.now();
    const result = await executeInternalQuery(connection, logger, { query, params });
    const executionTimeMs = Date.now() - startedAt;
    let output;
    let preview;
    if (format === 'csv') {
        output = formatAsCSV(result.rows, includeHeaders);
        preview = output.split('\n').slice(0, 5).join('\n');
    }
    else if (format === 'json') {
        output = JSON.stringify(result.rows, null, 2);
        preview = output.substring(0, 500);
    }
    else if (format === 'jsonl') {
        output = result.rows.map(row => JSON.stringify(row)).join('\n');
        preview = output.split('\n').slice(0, 5).join('\n');
    }
    else if (format === 'sql') {
        output = formatAsSQL(safeSchema, safeTable, result.rows);
        preview = output.split('\n').slice(0, 10).join('\n');
    }
    else {
        // Unreachable in practice (zod enum), kept as a defensive guard.
        throw new Error(`Unsupported format: ${format}`);
    }
    const sizeBytes = Buffer.byteLength(output, 'utf8');
    const sizeMB = (sizeBytes / 1024 / 1024).toFixed(2);
    return {
        table,
        schema,
        format,
        rowsExported: result.rows.length,
        sizeBytes,
        sizeMB,
        executionTimeMs,
        preview: preview + (output.length > preview.length ? '\n...' : ''),
        data: output
    };
}
|
|
85
|
+
/**
 * Generate batched SQL INSERT statements for (a subset of) a table's rows.
 *
 * Fixes:
 * - Throws a clear error when the table cannot be found in
 *   information_schema (previously an empty column list silently produced
 *   invalid `INSERT INTO t () VALUES` SQL).
 * - Clamps batchSize to at least 1 (a zero/negative batchSize previously
 *   made `Math.ceil(rows / batchSize)` Infinity, hanging the batch loop).
 *
 * @param {object} connection - database connection handle
 * @param {object} logger - structured logger
 * @param {object} args - validated GenerateInsertStatementsSchema arguments
 * @returns {Promise<object>} row/batch counts plus the statement text
 */
export async function generateInsertStatements(connection, logger, args) {
    const { table, schema, where, batchSize, includeSchema, limit } = args;
    logger.info('generateInsertStatements', 'Generating INSERT statements', { table });
    const sanitizedSchema = sanitizeIdentifier(schema);
    const sanitizedTable = sanitizeIdentifier(table);
    // Column names in ordinal order, straight from the catalog.
    const columnsQuery = `
    SELECT column_name
    FROM information_schema.columns
    WHERE table_schema = $1
    AND table_name = $2
    ORDER BY ordinal_position
  `;
    const columnsResult = await executeInternalQuery(connection, logger, {
        query: columnsQuery,
        params: [sanitizedSchema, sanitizedTable]
    });
    const columns = columnsResult.rows.map(row => row.column_name);
    if (columns.length === 0) {
        throw new Error(`Table "${sanitizedSchema}"."${sanitizedTable}" not found or has no columns`);
    }
    let whereClause = '';
    let dataQueryParams = [limit];
    if (where && where.length > 0) {
        const built = buildWhereClause(where, 2); // $1 is limit
        whereClause = `WHERE ${built.clause}`;
        dataQueryParams = [limit, ...built.params];
    }
    const dataQuery = `
    SELECT *
    FROM ${escapeIdentifier(sanitizedSchema)}.${escapeIdentifier(sanitizedTable)}
    ${whereClause}
    LIMIT $1
  `;
    const dataResult = await executeInternalQuery(connection, logger, {
        query: dataQuery,
        params: dataQueryParams
    });
    // Guard against a zero/negative batch size from the caller.
    const effectiveBatchSize = Math.max(1, Math.floor(batchSize));
    const statements = [];
    const batches = Math.ceil(dataResult.rows.length / effectiveBatchSize);
    const tableName = includeSchema
        ? `${escapeIdentifier(sanitizedSchema)}.${escapeIdentifier(sanitizedTable)}`
        : escapeIdentifier(sanitizedTable);
    for (let i = 0; i < batches; i++) {
        const batchStart = i * effectiveBatchSize;
        const batchEnd = Math.min((i + 1) * effectiveBatchSize, dataResult.rows.length);
        const batchRows = dataResult.rows.slice(batchStart, batchEnd);
        statements.push(`-- Batch ${i + 1} (${batchRows.length} rows)`);
        const columnNames = columns.map(escapeIdentifier).join(', ');
        statements.push(`INSERT INTO ${tableName} (${columnNames}) VALUES`);
        const valueRows = batchRows.map((row, idx) => {
            const values = columns.map(col => formatValue(row[col]));
            const isLast = idx === batchRows.length - 1;
            return `  (${values.join(', ')})${isLast ? ';' : ','}`;
        });
        statements.push(...valueRows);
        statements.push('');
    }
    return {
        table,
        schema,
        rowCount: dataResult.rows.length,
        batchCount: batches,
        batchSize,
        statements: statements.join('\n')
    };
}
|
|
148
|
+
/**
 * Render rows as CSV. Columns come from the first row's keys; null and
 * undefined values become empty fields.
 *
 * Fix: values containing a carriage return (\r) were previously emitted
 * unquoted, corrupting the CSV structure (RFC 4180 requires quoting any
 * field containing CR, LF, comma or double-quote). \r is now included in
 * the quoting condition.
 *
 * Cell values starting with =, +, - or @ are prefixed with a single quote
 * to defuse spreadsheet formula injection.
 *
 * @param {Array<object>} rows - result rows (plain objects)
 * @param {boolean} includeHeaders - emit a header line with column names
 * @returns {string} CSV text ('' for zero rows)
 */
function formatAsCSV(rows, includeHeaders) {
    if (rows.length === 0)
        return '';
    const columns = Object.keys(rows[0]);
    const lines = [];
    function escapeCsvValue(value) {
        const trimmed = value.trimStart();
        const needsFormulaEscape = /^[=+\-@]/.test(trimmed);
        const safeValue = needsFormulaEscape ? `'${value}` : value;
        if (safeValue.includes(',') || safeValue.includes('"') || safeValue.includes('\n') || safeValue.includes('\r')) {
            return `"${safeValue.replace(/"/g, '""')}"`;
        }
        return safeValue;
    }
    if (includeHeaders) {
        lines.push(columns.join(','));
    }
    for (const row of rows) {
        const values = columns.map(col => {
            const value = row[col];
            if (value === null || value === undefined)
                return '';
            const str = String(value);
            return escapeCsvValue(str);
        });
        lines.push(values.join(','));
    }
    return lines.join('\n');
}
|
|
177
|
+
// Render rows as a single multi-row INSERT statement for schema.table.
// Returns '' for zero rows. The column set comes from the first row's
// keys; `schema` and `table` are expected to be pre-sanitized by callers.
function formatAsSQL(schema, table, rows) {
    if (rows.length === 0) {
        return '';
    }
    const columns = Object.keys(rows[0]);
    const qualified = `${escapeIdentifier(schema)}.${escapeIdentifier(table)}`;
    const header = [
        `-- INSERT statements for ${qualified}`,
        `INSERT INTO ${qualified} (${columns.map(escapeIdentifier).join(', ')}) VALUES`
    ];
    const body = rows.map((row, idx) => {
        const rendered = columns.map(col => formatValue(row[col])).join(', ');
        const terminator = idx === rows.length - 1 ? ';' : ',';
        return `  (${rendered})${terminator}`;
    });
    return [...header, ...body].join('\n');
}
|
|
194
|
+
/**
 * Render a JavaScript value as a SQL literal for INSERT statements.
 *
 * Fixes:
 * - NaN/Infinity were previously emitted as bare tokens, which are not
 *   valid SQL numeric literals; PostgreSQL accepts them only as quoted
 *   strings ('NaN', 'Infinity'), so non-finite numbers are now quoted.
 * - bigint values are handled explicitly as unquoted numerics instead of
 *   falling through to the quoted-string path.
 *
 * null/undefined -> NULL; booleans -> TRUE/FALSE; Dates -> quoted ISO
 * string; objects/arrays -> quoted JSON; everything else -> quoted string
 * with single quotes doubled.
 *
 * @param {*} value - any row cell value
 * @returns {string} SQL literal text
 */
function formatValue(value) {
    if (value === null || value === undefined) {
        return 'NULL';
    }
    if (typeof value === 'number') {
        return Number.isFinite(value) ? String(value) : `'${String(value)}'`;
    }
    if (typeof value === 'bigint') {
        return value.toString();
    }
    if (typeof value === 'boolean') {
        return value ? 'TRUE' : 'FALSE';
    }
    if (value instanceof Date) {
        return `'${value.toISOString()}'`;
    }
    if (typeof value === 'object') {
        return `'${JSON.stringify(value).replace(/'/g, "''")}'`;
    }
    const str = String(value);
    return `'${str.replace(/'/g, "''")}'`;
}
|
|
213
|
+
// Registry of export tools, keyed by tool name.
// Each entry pairs a zod input schema with its handler function;
// consumed by the aggregate registry in tools/index.js.
export const exportTools = {
    exportTable: {
        schema: ExportTableSchema,
        handler: exportTable
    },
    generateInsertStatements: {
        schema: GenerateInsertStatementsSchema,
        handler: generateInsertStatements
    }
};
//# sourceMappingURL=export.js.map
|
|
@@ -0,0 +1,52 @@
|
|
|
1
|
+
import { databaseTools } from './database.js';
|
|
2
|
+
import { schemaTools } from './schema.js';
|
|
3
|
+
import { queryTools } from './query.js';
|
|
4
|
+
import { dataQualityTools } from './data-quality.js';
|
|
5
|
+
import { temporalTools } from './temporal.js';
|
|
6
|
+
import { monitoringTools } from './monitoring.js';
|
|
7
|
+
import { relationshipTools } from './relationships.js';
|
|
8
|
+
import { exportTools } from './export.js';
|
|
9
|
+
import { maintenanceTools } from './maintenance.js';
|
|
10
|
+
import { optimizationTools } from './optimization.js';
|
|
11
|
+
import { mutationTools } from './mutations.js';
|
|
12
|
+
import { liveMonitoringTools } from './live-monitoring.js';
|
|
13
|
+
// Flat registry of every MCP tool, merged from the per-category modules.
// Later spreads win on name collisions, so tool names must be unique
// across modules.
export const tools = {
    ...databaseTools,
    ...schemaTools,
    ...queryTools,
    ...dataQualityTools,
    ...temporalTools,
    ...monitoringTools,
    ...relationshipTools,
    ...exportTools,
    ...maintenanceTools,
    ...optimizationTools,
    ...mutationTools,
    ...liveMonitoringTools
};
|
|
27
|
+
/** Names of every registered tool. */
export function getToolNames() {
    return Object.entries(tools).map(([name]) => name);
}
/** Look up a tool definition by name; undefined for unknown names. */
export function getTool(name) {
    const tool = tools[name];
    return tool;
}
|
|
33
|
+
/**
 * Validate arguments against a tool's zod schema and run its handler.
 *
 * @param {string} name - registered tool name
 * @param {object} connection - database connection handle
 * @param {object} logger - structured logger
 * @param {object} args - raw tool arguments (validated via schema.parse)
 * @returns {Promise<*>} the handler's result
 * @throws {Error} when the tool is unknown, validation fails, or the
 *   handler rejects (the original error is logged and rethrown)
 */
export async function executeTool(name, connection, logger, args) {
    const tool = getTool(name);
    if (!tool) {
        throw new Error(`Tool "${name}" not found`);
    }
    const validatedArgs = tool.schema.parse(args);
    logger.info('tool', `Executing tool: ${name}`, { args: validatedArgs });
    try {
        const result = await tool.handler(connection, logger, validatedArgs);
        logger.info('tool', `Tool ${name} completed successfully`);
        return result;
    }
    catch (error) {
        const message = error instanceof Error ? error.message : String(error);
        logger.error('tool', `Tool ${name} failed`, { error: message });
        throw error;
    }
}
//# sourceMappingURL=index.js.map
|