s3db.js 11.2.6 → 11.3.2
This diff shows the published contents of these package versions as they appear in their public registries, and is provided for informational purposes only.
- package/README.md +138 -0
- package/dist/s3db.cjs.js +2 -2
- package/dist/s3db.es.js +2 -2
- package/mcp/.env.example +117 -0
- package/mcp/{server.js → entrypoint.js} +1941 -683
- package/mcp/tools/bulk.js +112 -0
- package/mcp/tools/connection.js +228 -0
- package/mcp/tools/crud.js +579 -0
- package/mcp/tools/debugging.js +299 -0
- package/mcp/tools/export-import.js +281 -0
- package/mcp/tools/index.js +67 -0
- package/mcp/tools/partitions.js +223 -0
- package/mcp/tools/query.js +150 -0
- package/mcp/tools/resources.js +96 -0
- package/mcp/tools/stats.js +281 -0
- package/package.json +17 -7
- package/src/database.class.js +1 -1
package/mcp/tools/debugging.js (new file)
@@ -0,0 +1,299 @@
+/**
+ * Debugging Tools
+ * Provides inspection, validation, and health check capabilities
+ */
+
+export const debuggingTools = [
+  {
+    name: 'dbInspectResource',
+    description: 'Inspect detailed information about a resource including schema, partitions, behaviors, and configuration',
+    inputSchema: {
+      type: 'object',
+      properties: {
+        resourceName: {
+          type: 'string',
+          description: 'Name of the resource to inspect'
+        }
+      },
+      required: ['resourceName']
+    }
+  },
+  {
+    name: 'dbGetMetadata',
+    description: 'Get raw metadata.json from the S3 bucket for debugging',
+    inputSchema: {
+      type: 'object',
+      properties: {},
+      required: []
+    }
+  },
+  {
+    name: 'resourceValidate',
+    description: 'Validate data against resource schema without inserting',
+    inputSchema: {
+      type: 'object',
+      properties: {
+        resourceName: {
+          type: 'string',
+          description: 'Name of the resource'
+        },
+        data: {
+          type: 'object',
+          description: 'Data to validate'
+        }
+      },
+      required: ['resourceName', 'data']
+    }
+  },
+  {
+    name: 'dbHealthCheck',
+    description: 'Perform comprehensive health check on database including orphaned partitions detection',
+    inputSchema: {
+      type: 'object',
+      properties: {
+        includeOrphanedPartitions: {
+          type: 'boolean',
+          description: 'Include orphaned partitions check',
+          default: true
+        }
+      },
+      required: []
+    }
+  },
+  {
+    name: 'resourceGetRaw',
+    description: 'Get raw S3 object data (metadata + body) for debugging',
+    inputSchema: {
+      type: 'object',
+      properties: {
+        resourceName: {
+          type: 'string',
+          description: 'Name of the resource'
+        },
+        id: {
+          type: 'string',
+          description: 'Document ID'
+        }
+      },
+      required: ['resourceName', 'id']
+    }
+  }
+];
+
+export function createDebuggingHandlers(server) {
+  return {
+    async dbInspectResource(args, database) {
+      server.ensureConnected(database);
+      const { resourceName } = args;
+      const resource = server.getResource(database, resourceName);
+
+      const inspection = {
+        success: true,
+        resource: {
+          name: resource.name,
+          behavior: resource.behavior,
+          version: resource.version,
+          createdBy: resource.createdBy || 'user',
+
+          schema: {
+            attributes: resource.attributes,
+            attributeCount: Object.keys(resource.attributes || {}).length,
+            fieldTypes: {}
+          },
+
+          partitions: resource.config.partitions ? {
+            count: Object.keys(resource.config.partitions).length,
+            definitions: resource.config.partitions,
+            orphaned: resource.findOrphanedPartitions ? resource.findOrphanedPartitions() : null
+          } : null,
+
+          configuration: {
+            timestamps: resource.config.timestamps,
+            paranoid: resource.config.paranoid,
+            strictValidation: resource.strictValidation,
+            asyncPartitions: resource.config.asyncPartitions,
+            versioningEnabled: resource.config.versioningEnabled,
+            autoDecrypt: resource.config.autoDecrypt
+          },
+
+          hooks: resource.config.hooks ? {
+            beforeInsert: resource.config.hooks.beforeInsert?.length || 0,
+            afterInsert: resource.config.hooks.afterInsert?.length || 0,
+            beforeUpdate: resource.config.hooks.beforeUpdate?.length || 0,
+            afterUpdate: resource.config.hooks.afterUpdate?.length || 0,
+            beforeDelete: resource.config.hooks.beforeDelete?.length || 0,
+            afterDelete: resource.config.hooks.afterDelete?.length || 0
+          } : null,
+
+          s3Paths: {
+            metadataKey: `${database.keyPrefix}metadata.json`,
+            resourcePrefix: `${database.keyPrefix}resource=${resourceName}/`
+          }
+        }
+      };
+
+      // Analyze field types
+      for (const [fieldName, fieldDef] of Object.entries(resource.attributes || {})) {
+        const typeStr = typeof fieldDef === 'string' ? fieldDef : fieldDef.type;
+        inspection.resource.schema.fieldTypes[fieldName] = typeStr;
+      }
+
+      return inspection;
+    },
+
+    async dbGetMetadata(args, database) {
+      server.ensureConnected(database);
+
+      const metadataKey = `${database.keyPrefix}metadata.json`;
+
+      try {
+        const response = await database.client.getObject({
+          Bucket: database.bucket,
+          Key: metadataKey
+        });
+
+        const metadataContent = await response.Body.transformToString();
+        const metadata = JSON.parse(metadataContent);
+
+        return {
+          success: true,
+          metadata,
+          s3Info: {
+            key: metadataKey,
+            bucket: database.bucket,
+            lastModified: response.LastModified,
+            size: response.ContentLength,
+            etag: response.ETag
+          }
+        };
+      } catch (error) {
+        return {
+          success: false,
+          error: error.message,
+          key: metadataKey
+        };
+      }
+    },
+
+    async resourceValidate(args, database) {
+      server.ensureConnected(database);
+      const { resourceName, data } = args;
+      const resource = server.getResource(database, resourceName);
+
+      try {
+        // Use the schema validator if available
+        const validationResult = resource.schema.validate(data);
+
+        return {
+          success: true,
+          valid: validationResult === true,
+          errors: validationResult === true ? [] : validationResult,
+          data: data
+        };
+      } catch (error) {
+        return {
+          success: false,
+          valid: false,
+          error: error.message,
+          data: data
+        };
+      }
+    },
+
+    async dbHealthCheck(args, database) {
+      server.ensureConnected(database);
+      const { includeOrphanedPartitions = true } = args;
+
+      const health = {
+        success: true,
+        timestamp: new Date().toISOString(),
+        database: {
+          connected: database.isConnected(),
+          bucket: database.bucket,
+          keyPrefix: database.keyPrefix,
+          version: database.s3dbVersion
+        },
+        resources: {
+          total: Object.keys(database.resources || {}).length,
+          list: Object.keys(database.resources || {}),
+          details: {}
+        },
+        issues: []
+      };
+
+      // Check each resource
+      for (const [name, resource] of Object.entries(database.resources || {})) {
+        const resourceHealth = {
+          name,
+          behavior: resource.behavior,
+          attributeCount: Object.keys(resource.attributes || {}).length,
+          partitionCount: resource.config.partitions ? Object.keys(resource.config.partitions).length : 0
+        };
+
+        // Check for orphaned partitions
+        if (includeOrphanedPartitions && resource.findOrphanedPartitions) {
+          const orphaned = resource.findOrphanedPartitions();
+          if (Object.keys(orphaned).length > 0) {
+            resourceHealth.orphanedPartitions = orphaned;
+            health.issues.push({
+              severity: 'warning',
+              resource: name,
+              type: 'orphaned_partitions',
+              message: `Resource '${name}' has ${Object.keys(orphaned).length} orphaned partition(s)`,
+              details: orphaned
+            });
+          }
+        }
+
+        health.resources.details[name] = resourceHealth;
+      }
+
+      health.healthy = health.issues.length === 0;
+
+      return health;
+    },
+
+    async resourceGetRaw(args, database) {
+      server.ensureConnected(database);
+      const { resourceName, id } = args;
+      const resource = server.getResource(database, resourceName);
+
+      try {
+        // Build S3 key
+        const key = `${database.keyPrefix}resource=${resourceName}/id=${id}.json`;
+
+        const response = await database.client.getObject({
+          Bucket: database.bucket,
+          Key: key
+        });
+
+        const body = await response.Body.transformToString();
+        const bodyData = body ? JSON.parse(body) : null;
+
+        return {
+          success: true,
+          s3Object: {
+            key,
+            bucket: database.bucket,
+            metadata: response.Metadata || {},
+            contentLength: response.ContentLength,
+            lastModified: response.LastModified,
+            etag: response.ETag,
+            contentType: response.ContentType
+          },
+          data: {
+            metadata: response.Metadata,
+            body: bodyData
+          }
+        };
+      } catch (error) {
+        return {
+          success: false,
+          error: error.message,
+          id,
+          resource: resourceName
+        };
+      }
+    }
+  };
+}
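The handlers above are plain async functions keyed by tool name, so wiring them into a dispatcher is mostly a lookup. A minimal sketch, assuming a `server` object that exposes the `ensureConnected`/`getResource` helpers the handlers call and a connected s3db `database` instance — the stub bodies here are illustrative, not the package's own implementations:

```js
import { debuggingTools, createDebuggingHandlers } from './debugging.js';

// Stub helpers with the shape the handlers expect; the real implementations
// live in the package's MCP entrypoint (hypothetical bodies for illustration).
const server = {
  ensureConnected(db) {
    if (!db || !db.isConnected()) throw new Error('Database not connected');
  },
  getResource(db, name) {
    const resource = db.resources?.[name];
    if (!resource) throw new Error(`Unknown resource: ${name}`);
    return resource;
  }
};

const handlers = createDebuggingHandlers(server);

// Route a tool call by name, the way an MCP request router might.
export async function callDebugTool(name, args, database) {
  if (!debuggingTools.some(tool => tool.name === name)) {
    throw new Error(`Unknown debugging tool: ${name}`);
  }
  return handlers[name](args ?? {}, database);
}

// e.g. const health = await callDebugTool('dbHealthCheck', {}, database);
```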
package/mcp/tools/export-import.js (new file)
@@ -0,0 +1,281 @@
+/**
+ * Export/Import Tools
+ * Handles data export (JSON, CSV, NDJSON), import, and metadata backup
+ */
+
+export const exportImportTools = [
+  {
+    name: 'resourceExport',
+    description: 'Export resource data to JSON, CSV, or NDJSON format',
+    inputSchema: {
+      type: 'object',
+      properties: {
+        resourceName: {
+          type: 'string',
+          description: 'Name of the resource'
+        },
+        format: {
+          type: 'string',
+          description: 'Export format',
+          enum: ['json', 'ndjson', 'csv'],
+          default: 'json'
+        },
+        filters: {
+          type: 'object',
+          description: 'Optional filters to export subset of data'
+        },
+        fields: {
+          type: 'array',
+          items: { type: 'string' },
+          description: 'Specific fields to export (exports all if not specified)'
+        },
+        limit: {
+          type: 'number',
+          description: 'Maximum number of records to export'
+        }
+      },
+      required: ['resourceName']
+    }
+  },
+  {
+    name: 'resourceImport',
+    description: 'Import data from JSON or NDJSON format into a resource',
+    inputSchema: {
+      type: 'object',
+      properties: {
+        resourceName: {
+          type: 'string',
+          description: 'Name of the resource'
+        },
+        data: {
+          type: 'array',
+          description: 'Array of documents to import'
+        },
+        mode: {
+          type: 'string',
+          description: 'Import mode',
+          enum: ['insert', 'upsert', 'replace'],
+          default: 'insert'
+        },
+        batchSize: {
+          type: 'number',
+          description: 'Batch size for bulk operations',
+          default: 100
+        }
+      },
+      required: ['resourceName', 'data']
+    }
+  },
+  {
+    name: 'dbBackupMetadata',
+    description: 'Create a backup of the metadata.json file',
+    inputSchema: {
+      type: 'object',
+      properties: {
+        timestamp: {
+          type: 'boolean',
+          description: 'Include timestamp in backup name',
+          default: true
+        }
+      },
+      required: []
+    }
+  }
+];
+
+export function createExportImportHandlers(server) {
+  return {
+    async resourceExport(args, database) {
+      server.ensureConnected(database);
+      const { resourceName, format = 'json', filters, fields, limit } = args;
+      const resource = server.getResource(database, resourceName);
+
+      try {
+        // Get data
+        let data;
+        if (filters) {
+          data = await resource.query(filters, limit ? { limit } : {});
+        } else if (limit) {
+          data = await resource.list({ limit });
+        } else {
+          data = await resource.getAll();
+        }
+
+        // Filter fields if specified
+        if (fields && fields.length > 0) {
+          data = data.map(doc => {
+            const filtered = {};
+            for (const field of fields) {
+              if (doc[field] !== undefined) {
+                filtered[field] = doc[field];
+              }
+            }
+            return filtered;
+          });
+        }
+
+        let exportData;
+        let contentType;
+
+        switch (format) {
+          case 'json':
+            exportData = JSON.stringify(data, null, 2);
+            contentType = 'application/json';
+            break;
+
+          case 'ndjson':
+            exportData = data.map(doc => JSON.stringify(doc)).join('\n');
+            contentType = 'application/x-ndjson';
+            break;
+
+          case 'csv':
+            // Simple CSV conversion
+            if (data.length === 0) {
+              exportData = '';
+            } else {
+              const headers = Object.keys(data[0]);
+              const csvRows = [headers.join(',')];
+              for (const doc of data) {
+                const row = headers.map(h => {
+                  const val = doc[h];
+                  if (val === null || val === undefined) return '';
+                  const str = typeof val === 'object' ? JSON.stringify(val) : String(val);
+                  // Quote and escape values that would otherwise break the row
+                  return /[",\n]/.test(str) ? `"${str.replace(/"/g, '""')}"` : str;
+                });
+                csvRows.push(row.join(','));
+              }
+              exportData = csvRows.join('\n');
+            }
+            contentType = 'text/csv';
+            break;
+
+          default:
+            throw new Error(`Unsupported format: ${format}`);
+        }
+
+        return {
+          success: true,
+          resource: resourceName,
+          format,
+          recordCount: data.length,
+          exportData,
+          contentType,
+          size: exportData.length
+        };
+      } catch (error) {
+        return {
+          success: false,
+          error: error.message,
+          resource: resourceName,
+          format
+        };
+      }
+    },
+
+    async resourceImport(args, database) {
+      server.ensureConnected(database);
+      const { resourceName, data, mode = 'insert', batchSize = 100 } = args;
+      const resource = server.getResource(database, resourceName);
+
+      // Declared outside the try block so the catch handler can report progress
+      let processed = 0;
+
+      try {
+        const results = [];
+
+        // Process in batches
+        for (let i = 0; i < data.length; i += batchSize) {
+          const batch = data.slice(i, i + batchSize);
+
+          let batchResults;
+          switch (mode) {
+            case 'insert':
+              batchResults = await resource.insertMany(batch);
+              break;
+
+            case 'upsert':
+              batchResults = await Promise.all(batch.map(doc => resource.upsert(doc)));
+              break;
+
+            case 'replace':
+              // Delete all first if first batch
+              if (i === 0) {
+                await resource.deleteAll();
+              }
+              batchResults = await resource.insertMany(batch);
+              break;

+            default:
+              throw new Error(`Unsupported mode: ${mode}`);
+          }
+
+          results.push(...batchResults);
+          processed += batch.length;
+        }
+
+        return {
+          success: true,
+          resource: resourceName,
+          mode,
+          importedCount: results.length,
+          totalRecords: data.length,
+          batchSize
+        };
+      } catch (error) {
+        return {
+          success: false,
+          error: error.message,
+          resource: resourceName,
+          mode,
+          processed
+        };
+      }
+    },
+
+    async dbBackupMetadata(args, database) {
+      server.ensureConnected(database);
+      const { timestamp = true } = args;
+
+      try {
+        const metadataKey = `${database.keyPrefix}metadata.json`;
+
+        // Read current metadata
+        const response = await database.client.getObject({
+          Bucket: database.bucket,
+          Key: metadataKey
+        });
+
+        const metadataContent = await response.Body.transformToString();
+
+        // Create backup key
+        const backupSuffix = timestamp ? `-backup-${Date.now()}` : '-backup';
+        const backupKey = metadataKey.replace('.json', `${backupSuffix}.json`);
+
+        // Save backup
+        await database.client.putObject({
+          Bucket: database.bucket,
+          Key: backupKey,
+          Body: metadataContent,
+          ContentType: 'application/json'
+        });
+
+        return {
+          success: true,
+          message: 'Metadata backup created',
+          backup: {
+            key: backupKey,
+            bucket: database.bucket,
+            timestamp: new Date().toISOString(),
+            size: metadataContent.length
+          },
+          original: {
+            key: metadataKey
+          }
+        };
+      } catch (error) {
+        return {
+          success: false,
+          error: error.message
+        };
+      }
+    }
+  };
+}
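Because `resourceExport` returns the serialized payload in-band (`exportData`) rather than writing a file, a round trip through `resourceImport` is just parse-and-feed. A hedged sketch, assuming the same stub `server` and connected `database` as in the previous example; the resource names are made up:

```js
import { createExportImportHandlers } from './export-import.js';

const handlers = createExportImportHandlers(server);

// Export up to 1000 records as NDJSON (one JSON document per line)...
const exported = await handlers.resourceExport(
  { resourceName: 'users', format: 'ndjson', limit: 1000 },
  database
);

if (exported.success) {
  // ...parse each line back into an object...
  const docs = exported.exportData
    .split('\n')
    .filter(Boolean)
    .map(line => JSON.parse(line));

  // ...and upsert the documents into another resource in batches of 100.
  const imported = await handlers.resourceImport(
    { resourceName: 'users_copy', data: docs, mode: 'upsert', batchSize: 100 },
    database
  );
  console.log(`${imported.importedCount} of ${imported.totalRecords} imported`);
}
```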
package/mcp/tools/index.js (new file)
@@ -0,0 +1,67 @@
+/**
+ * MCP Tools Registry
+ * Exports all tool definitions and handlers organized by domain
+ */
+
+import { connectionTools, createConnectionHandlers } from './connection.js';
+import { resourceManagementTools, createResourceManagementHandlers } from './resources.js';
+import { crudTools, createCrudHandlers } from './crud.js';
+import { debuggingTools, createDebuggingHandlers } from './debugging.js';
+import { queryTools, createQueryHandlers } from './query.js';
+import { partitionTools, createPartitionHandlers } from './partitions.js';
+import { bulkTools, createBulkHandlers } from './bulk.js';
+import { exportImportTools, createExportImportHandlers } from './export-import.js';
+import { statsTools, createStatsHandlers } from './stats.js';
+
+/**
+ * Get all tool definitions
+ */
+export function getAllTools() {
+  return [
+    ...connectionTools,
+    ...resourceManagementTools,
+    ...crudTools,
+    ...debuggingTools,
+    ...queryTools,
+    ...partitionTools,
+    ...bulkTools,
+    ...exportImportTools,
+    ...statsTools
+  ];
+}
+
+/**
+ * Create all tool handlers
+ * @param {Object} server - Server instance with helper methods
+ * @returns {Object} Map of tool name -> handler function
+ */
+export function createAllHandlers(server) {
+  return {
+    ...createConnectionHandlers(server),
+    ...createResourceManagementHandlers(server),
+    ...createCrudHandlers(server),
+    ...createDebuggingHandlers(server),
+    ...createQueryHandlers(server),
+    ...createPartitionHandlers(server),
+    ...createBulkHandlers(server),
+    ...createExportImportHandlers(server),
+    ...createStatsHandlers(server)
+  };
+}
+
+/**
+ * Get tools organized by category
+ */
+export function getToolsByCategory() {
+  return {
+    connection: connectionTools,
+    resources: resourceManagementTools,
+    crud: crudTools,
+    debugging: debuggingTools,
+    query: queryTools,
+    partitions: partitionTools,
+    bulk: bulkTools,
+    exportImport: exportImportTools,
+    stats: statsTools
+  };
+}
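`getAllTools()` yields the flat `{ name, description, inputSchema }` list an MCP client sees, while `createAllHandlers(server)` yields the matching name-to-function map, so a router only needs to join the two. A minimal dispatch sketch under the same stub-`server` assumption; the request shape here is illustrative, not the package's actual entrypoint code:

```js
import { getAllTools, createAllHandlers } from './index.js';

const tools = getAllTools();                // what a tools/list request would advertise
const handlers = createAllHandlers(server); // stub server as in the sketches above

// Generic tool-call router: look up the handler by name and invoke it.
export async function handleToolCall(request, database) {
  const { name, arguments: args } = request;
  const handler = handlers[name];
  if (!handler) {
    return { success: false, error: `Unknown tool: ${name}` };
  }
  return handler(args ?? {}, database);
}

console.log(`${tools.length} tools registered:`, tools.map(tool => tool.name));
```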