@gainable.dev/mcp-server 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/collectionSchema.d.ts +45 -0
- package/dist/collectionSchema.js +221 -0
- package/dist/config.d.ts +11 -0
- package/dist/config.js +31 -0
- package/dist/gainableHttpRunner.d.ts +123 -0
- package/dist/gainableHttpRunner.js +54 -0
- package/dist/index.d.ts +1 -0
- package/dist/index.js +49 -0
- package/dist/scopedConnectionManager.d.ts +7 -0
- package/dist/scopedConnectionManager.js +57 -0
- package/dist/scopedProvider.d.ts +2 -0
- package/dist/scopedProvider.js +130 -0
- package/dist/scoping/index.d.ts +4 -0
- package/dist/scoping/index.js +3 -0
- package/dist/scoping/injectScope.d.ts +4 -0
- package/dist/scoping/injectScope.js +77 -0
- package/dist/scoping/resolveCollection.d.ts +8 -0
- package/dist/scoping/resolveCollection.js +26 -0
- package/dist/scoping/sanitizePipeline.d.ts +2 -0
- package/dist/scoping/sanitizePipeline.js +30 -0
- package/dist/stripUntrustedTags.d.ts +19 -0
- package/dist/stripUntrustedTags.js +96 -0
- package/dist/writeValidation.d.ts +19 -0
- package/dist/writeValidation.js +151 -0
- package/package.json +34 -0
|
@@ -0,0 +1,45 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Inferred field info from sampling actual documents.
|
|
3
|
+
*/
|
|
4
|
+
export interface FieldInfo {
|
|
5
|
+
name: string;
|
|
6
|
+
types: string[];
|
|
7
|
+
/** Sample distinct values for low-cardinality fields (enums). */
|
|
8
|
+
enumValues?: string[];
|
|
9
|
+
/** Whether this field appears to reference another collection (ObjectId). */
|
|
10
|
+
isReference?: boolean;
|
|
11
|
+
}
|
|
12
|
+
export interface CollectionSchemaInfo {
|
|
13
|
+
collection: string;
|
|
14
|
+
fields: FieldInfo[];
|
|
15
|
+
namingConvention: 'camelCase' | 'snake_case' | 'mixed';
|
|
16
|
+
inferredAt: Date;
|
|
17
|
+
}
|
|
18
|
+
/**
|
|
19
|
+
* Initialize the schema inference connection.
|
|
20
|
+
* Call once at startup with the MongoDB URI and database name.
|
|
21
|
+
*/
|
|
22
|
+
export declare function initSchemaInference(mongoUri: string, dbName: string): Promise<void>;
|
|
23
|
+
/**
|
|
24
|
+
* Pre-warm the schema cache for known collections at startup.
|
|
25
|
+
* This ensures synchronous cache lookups work from the very first query.
|
|
26
|
+
*/
|
|
27
|
+
export declare function preWarmSchemaCache(appName: string, allowedDatasets: string[]): Promise<void>;
|
|
28
|
+
/**
|
|
29
|
+
* Infer schema from a collection by sampling documents.
|
|
30
|
+
* Results are cached for the lifetime of the process.
|
|
31
|
+
*/
|
|
32
|
+
export declare function getCollectionSchema(realCollectionName: string, cleanName: string): Promise<CollectionSchemaInfo | undefined>;
|
|
33
|
+
/**
|
|
34
|
+
* Synchronous cache-only lookup. Returns the schema if already cached, undefined otherwise.
|
|
35
|
+
* Use this in code paths that cannot be async (e.g. provider.find returns a cursor, not a promise).
|
|
36
|
+
*/
|
|
37
|
+
export declare function getCachedSchema(realCollectionName: string): CollectionSchemaInfo | undefined;
|
|
38
|
+
/**
|
|
39
|
+
* Invalidate cached schema for a collection (call after writes).
|
|
40
|
+
*/
|
|
41
|
+
export declare function invalidateSchema(realCollectionName: string): void;
|
|
42
|
+
/**
|
|
43
|
+
* Format schema info as a hint string to append to tool responses.
|
|
44
|
+
*/
|
|
45
|
+
export declare function formatSchemaHint(schema: CollectionSchemaInfo): string;
|
|
@@ -0,0 +1,221 @@
import { MongoClient } from 'mongodb';
import { CORE_EXPOSED as CORE_COLLECTIONS } from './scoping/index.js';
/**
 * Process-lifetime cache of inferred schemas, keyed by the real
 * (prefixed) collection name.
 */
const schemaCache = new Map();
/**
 * Singleton MongoDB handle used only for schema sampling.
 * Both stay undefined until initSchemaInference() has completed.
 */
let schemaDb;
let schemaClient;
/**
 * Initialize the schema inference connection.
 * Call once at startup with the MongoDB URI and database name.
 * Calling again after a database handle exists is a no-op.
 */
export async function initSchemaInference(mongoUri, dbName) {
    if (schemaDb) {
        return;
    }
    schemaClient = new MongoClient(mongoUri);
    await schemaClient.connect();
    schemaDb = schemaClient.db(dbName);
}
/**
 * Pre-warm the schema cache for known collections at startup.
 * This ensures synchronous cache lookups work from the very first query.
 *
 * Three kinds of collections are sampled:
 *  - `${appName}_<clean>`: the app's own custom collections
 *  - `data_<datasetId>`: datasets listed in allowedDatasets
 *  - core collections exposed via CORE_COLLECTIONS (e.g. users)
 *
 * Best-effort: any failure is logged and swallowed so startup proceeds.
 */
export async function preWarmSchemaCache(appName, allowedDatasets) {
    if (!schemaDb) {
        return;
    }
    const appPrefix = `${appName}_`;
    try {
        const collections = await schemaDb.listCollections().toArray();
        for (const { name } of collections) {
            // App's custom collections
            if (name.startsWith(appPrefix)) {
                await getCollectionSchema(name, name.slice(appPrefix.length));
                continue;
            }
            // Allowed data collections
            const datasetId = allowedDatasets.find(id => name === `data_${id}`);
            if (datasetId !== undefined) {
                await getCollectionSchema(name, datasetId);
            }
            // Core collections (e.g. users)
            if (CORE_COLLECTIONS.includes(name)) {
                await getCollectionSchema(name, name);
            }
        }
        console.log(`[Schema] Pre-warmed cache for ${schemaCache.size} collections`);
    }
    catch (err) {
        console.warn('[Schema] Pre-warm failed:', err);
    }
}
/**
 * Infer schema from a collection by sampling documents.
 * Results are cached for the lifetime of the process.
 *
 * @param realCollectionName prefixed name used against MongoDB (also the cache key)
 * @param cleanName          agent-facing name reported inside the schema
 * @returns the schema, or undefined when the connection is uninitialized,
 *          the collection is empty, or sampling throws.
 */
export async function getCollectionSchema(realCollectionName, cleanName) {
    if (!schemaDb) {
        return undefined;
    }
    const hit = schemaCache.get(realCollectionName);
    if (hit) {
        return hit;
    }
    try {
        // Sample up to 50 documents — enough to spot types, enums and references.
        const sample = await schemaDb
            .collection(realCollectionName)
            .find({})
            .limit(50)
            .toArray();
        // Empty collections are not cached, so they get re-checked next call.
        if (sample.length === 0) {
            return undefined;
        }
        const inferred = inferSchema(cleanName, sample);
        schemaCache.set(realCollectionName, inferred);
        return inferred;
    }
    catch {
        return undefined;
    }
}
/**
 * Synchronous cache-only lookup. Returns the schema if already cached, undefined otherwise.
 * Use this in code paths that cannot be async (e.g. provider.find returns a cursor, not a promise).
 */
export function getCachedSchema(realCollectionName) {
    return schemaCache.get(realCollectionName);
}
/**
 * Invalidate cached schema for a collection (call after writes).
 * The next getCollectionSchema() for this name re-samples from MongoDB.
 */
export function invalidateSchema(realCollectionName) {
    schemaCache.delete(realCollectionName);
}
/**
 * Build a schema description by scanning sampled documents.
 * Tracks per-field types, occurrence counts and short string values
 * (enum candidates), then filters outlier fields and classifies the
 * collection's naming convention.
 */
function inferSchema(cleanName, docs) {
    // Per-field accumulator: observed type names, short string values, count.
    const fieldMap = new Map();
    for (const doc of docs) {
        for (const [key, value] of Object.entries(doc)) {
            // Bookkeeping fields are never part of the domain schema.
            if (key === '_id' || key === '__v' || key === 'createdAt' || key === 'updatedAt') {
                continue;
            }
            let stats = fieldMap.get(key);
            if (stats === undefined) {
                stats = { types: new Set(), values: new Set(), count: 0 };
                fieldMap.set(key, stats);
            }
            stats.count += 1;
            stats.types.add(getTypeName(value));
            // Short strings are tracked as potential enum values.
            if (typeof value === 'string' && value.length < 50) {
                stats.values.add(value);
            }
        }
    }
    // Drop outlier fields seen in under 20% of documents (likely bad inserts),
    // but only once the sample is large enough (> 3 docs) to judge.
    const minFieldCount = Math.max(1, Math.floor(docs.length * 0.2));
    const fields = [];
    for (const [name, stats] of fieldMap) {
        if (docs.length > 3 && stats.count < minFieldCount) {
            continue;
        }
        const distinct = stats.values.size;
        // A string field with few distinct values relative to the sample is an
        // enum; requiring count >= 3x distinct avoids open-ended fields (names).
        const isEnum = stats.types.has('string') &&
            distinct > 0 &&
            distinct <= 10 &&
            stats.count >= distinct * 3;
        fields.push({
            name,
            types: [...stats.types],
            ...(isEnum ? { enumValues: [...stats.values].sort() } : {}),
            // Non-_id ObjectId values look like references to other collections.
            ...(stats.types.has('ObjectId') ? { isReference: true } : {}),
        });
    }
    return {
        collection: cleanName,
        fields,
        namingConvention: detectNamingConvention(fields.map(f => f.name)),
        inferredAt: new Date(),
    };
}
/**
 * Map a sampled value to a coarse type name ('string', 'ObjectId', 'ISODate', ...).
 * null and undefined both collapse to 'null'; anything unrecognized is 'object'.
 */
function getTypeName(value) {
    if (value === null || value === undefined) {
        return 'null';
    }
    switch (typeof value) {
        case 'string': return 'string';
        case 'number': return 'number';
        case 'boolean': return 'boolean';
    }
    if (value instanceof Date) {
        return 'ISODate';
    }
    if (Array.isArray(value)) {
        return 'array';
    }
    // Check for BSON types by their _bsontype tag.
    const bson = value._bsontype;
    if (bson === 'ObjectId' || bson === 'ObjectID') {
        return 'ObjectId';
    }
    if (bson === 'Decimal128') {
        return 'Decimal128';
    }
    // MongoDB driver ObjectId duck-typing (24-char hex rendering).
    if (typeof value.toHexString === 'function' && String(value).length === 24) {
        return 'ObjectId';
    }
    // Dates that arrive as plain objects from another realm.
    if (value instanceof Object && value.constructor?.name === 'Date') {
        return 'ISODate';
    }
    return 'object';
}
/**
 * Classify field names as camelCase, snake_case, or mixed.
 * Names with neither marker (e.g. single lowercase words) count toward
 * neither bucket, so a collection of only such names reports 'mixed'.
 */
function detectNamingConvention(fieldNames) {
    const snakeCount = fieldNames.filter(n => n.includes('_')).length;
    const camelCount = fieldNames.filter(n => !n.includes('_') && /[a-z][A-Z]/.test(n)).length;
    if (camelCount > 0 && snakeCount === 0) {
        return 'camelCase';
    }
    if (snakeCount > 0 && camelCount === 0) {
        return 'snake_case';
    }
    return 'mixed';
}
/**
 * Format schema info as a hint string to append to tool responses.
 * Produces one line per field (with reference/enum annotations), an optional
 * naming-convention line, and a fixed block of UX rules for the agent.
 */
export function formatSchemaHint(schema) {
    const hasReferences = schema.fields.some(f => f.isReference);
    const lines = [`SCHEMA for "${schema.collection}":`];
    for (const field of schema.fields) {
        // Assemble the field line from its optional parts.
        const parts = [`  ${field.name}: ${field.types.join('|')}`];
        if (field.isReference) {
            parts.push(' (reference - when the user gives a name, automatically look it up in the related collection to get the _id. Never ask the user for an ID. When writing, use EJSON format: {"$oid": "the_id_here"})');
        }
        if (field.enumValues && field.enumValues.length > 0) {
            parts.push(` [valid values: ${field.enumValues.map(v => `"${v}"`).join(', ')}]`);
        }
        lines.push(parts.join(''));
    }
    if (schema.namingConvention !== 'mixed') {
        lines.push(`Field naming convention: ${schema.namingConvention}. Use this convention for all writes.`);
    }
    lines.push('IMPORTANT UX RULES:');
    lines.push('- Never show ObjectIds, internal field names, or technical database details to the user.');
    lines.push('- Ask for fields using friendly names (e.g. "Customer" not "customer: string", "Assigned to" not "assignedTo: ObjectId").');
    if (hasReferences) {
        lines.push('- For reference fields: accept human-readable names from the user (e.g. a person name), then silently look up the _id in the appropriate collection (e.g. users) using the find tool. Never ask the user for an ID or ObjectId.');
    }
    lines.push('- For date fields: accept any common format from the user. When writing, use EJSON format: {"$date": "2025-05-05T00:00:00Z"}.');
    lines.push('- For enum fields: if the user gives the wrong casing, silently correct it.');
    lines.push('- BEFORE QUERYING: Always use the collection-schema tool first to discover the correct field names. Never guess field names.');
    lines.push('- VERIFY WRITES: After inserting or updating, query the document back to confirm it was saved correctly. Show the user a friendly summary, not raw data.');
    return lines.join('\n');
}
|
package/dist/config.d.ts
ADDED
|
@@ -0,0 +1,11 @@
|
|
|
1
|
+
export type AgentScopes = Record<string, string[]>;
|
|
2
|
+
export interface ScopingConfig {
|
|
3
|
+
appName: string;
|
|
4
|
+
accountId: string;
|
|
5
|
+
allowedDatasets: string[];
|
|
6
|
+
mongodbUri: string;
|
|
7
|
+
agentScopes: AgentScopes;
|
|
8
|
+
/** Per-session field: which collections this agent can access. undefined = unrestricted. */
|
|
9
|
+
agentCollections?: string[];
|
|
10
|
+
}
|
|
11
|
+
export declare function loadConfig(): ScopingConfig;
|
package/dist/config.js
ADDED
|
@@ -0,0 +1,31 @@
|
|
|
1
|
+
export function loadConfig() {
|
|
2
|
+
const appName = process.env.APP_NAME;
|
|
3
|
+
const accountId = process.env.ACCOUNT_ID;
|
|
4
|
+
const mongodbUri = process.env.MONGODB_URI;
|
|
5
|
+
if (!appName)
|
|
6
|
+
throw new Error('APP_NAME is required');
|
|
7
|
+
if (!accountId)
|
|
8
|
+
throw new Error('ACCOUNT_ID is required');
|
|
9
|
+
if (!mongodbUri)
|
|
10
|
+
throw new Error('MONGODB_URI is required');
|
|
11
|
+
const allowedDatasets = (process.env.ALLOWED_DATASETS || '')
|
|
12
|
+
.split(',')
|
|
13
|
+
.map(s => s.trim())
|
|
14
|
+
.filter(Boolean);
|
|
15
|
+
const agentScopes = parseAgentScopes(process.env.AGENT_SCOPES);
|
|
16
|
+
return { appName, accountId, allowedDatasets, mongodbUri, agentScopes };
|
|
17
|
+
}
|
|
18
|
+
function parseAgentScopes(raw) {
|
|
19
|
+
if (!raw)
|
|
20
|
+
return {};
|
|
21
|
+
try {
|
|
22
|
+
const parsed = JSON.parse(raw);
|
|
23
|
+
if (typeof parsed !== 'object' || parsed === null)
|
|
24
|
+
return {};
|
|
25
|
+
return parsed;
|
|
26
|
+
}
|
|
27
|
+
catch {
|
|
28
|
+
console.warn('AGENT_SCOPES is not valid JSON, ignoring.');
|
|
29
|
+
return {};
|
|
30
|
+
}
|
|
31
|
+
}
|
|
@@ -0,0 +1,123 @@
|
|
|
1
|
+
import { StreamableHttpRunner } from 'mongodb-mcp-server';
|
|
2
|
+
import type { ScopingConfig } from './config.js';
|
|
3
|
+
/**
|
|
4
|
+
* Extends StreamableHttpRunner to support per-agent collection scoping.
|
|
5
|
+
*
|
|
6
|
+
* Each Weavy agent connects with a URL like `/mcp?agent=marketing-agent`.
|
|
7
|
+
* The agent ID is extracted from the query parameter and used to look up
|
|
8
|
+
* which collections that agent is allowed to access (from AGENT_SCOPES env var).
|
|
9
|
+
*/
|
|
10
|
+
export declare class GainableHttpRunner extends StreamableHttpRunner {
|
|
11
|
+
private baseConfig;
|
|
12
|
+
constructor(config: ConstructorParameters<typeof StreamableHttpRunner>[0], scopingConfig: ScopingConfig);
|
|
13
|
+
protected createServerForRequest({ request, serverOptions, sessionOptions, }: Parameters<StreamableHttpRunner['createServerForRequest']>[0]): Promise<import("mongodb-mcp-server").Server<{
|
|
14
|
+
apiBaseUrl: string;
|
|
15
|
+
assistantBaseUrl: string;
|
|
16
|
+
loggers: ("mcp" | "disk" | "stderr")[];
|
|
17
|
+
logPath: string;
|
|
18
|
+
disabledTools: string[];
|
|
19
|
+
confirmationRequiredTools: string[];
|
|
20
|
+
readOnly: boolean;
|
|
21
|
+
indexCheck: boolean;
|
|
22
|
+
telemetry: "enabled" | "disabled";
|
|
23
|
+
transport: "stdio" | "http";
|
|
24
|
+
httpPort: number;
|
|
25
|
+
httpHost: string;
|
|
26
|
+
httpHeaders: {
|
|
27
|
+
[x: string]: unknown;
|
|
28
|
+
};
|
|
29
|
+
httpBodyLimit: number;
|
|
30
|
+
idleTimeoutMs: number;
|
|
31
|
+
notificationTimeoutMs: number;
|
|
32
|
+
maxBytesPerQuery: number;
|
|
33
|
+
maxDocumentsPerQuery: number;
|
|
34
|
+
exportsPath: string;
|
|
35
|
+
exportTimeoutMs: number;
|
|
36
|
+
exportCleanupIntervalMs: number;
|
|
37
|
+
atlasTemporaryDatabaseUserLifetimeMs: number;
|
|
38
|
+
voyageApiKey: string;
|
|
39
|
+
embeddingsValidation: boolean;
|
|
40
|
+
vectorSearchDimensions: number;
|
|
41
|
+
vectorSearchSimilarityFunction: "cosine" | "euclidean" | "dotProduct";
|
|
42
|
+
previewFeatures: ("search" | "mcpUI")[];
|
|
43
|
+
allowRequestOverrides: boolean;
|
|
44
|
+
dryRun: boolean;
|
|
45
|
+
externallyManagedSessions: boolean;
|
|
46
|
+
httpResponseType: "json" | "sse";
|
|
47
|
+
apiClientId?: string | undefined;
|
|
48
|
+
apiClientSecret?: string | undefined;
|
|
49
|
+
connectionString?: string | undefined;
|
|
50
|
+
healthCheckPort?: number | undefined;
|
|
51
|
+
healthCheckHost?: string | undefined;
|
|
52
|
+
gssapiHostName?: string | undefined;
|
|
53
|
+
sslFIPSMode?: boolean | undefined;
|
|
54
|
+
ssl?: boolean | undefined;
|
|
55
|
+
sslAllowInvalidCertificates?: boolean | undefined;
|
|
56
|
+
sslAllowInvalidHostnames?: boolean | undefined;
|
|
57
|
+
sslPEMKeyFile?: string | undefined;
|
|
58
|
+
sslPEMKeyPassword?: string | undefined;
|
|
59
|
+
sslCAFile?: string | undefined;
|
|
60
|
+
sslCertificateSelector?: string | undefined;
|
|
61
|
+
sslCRLFile?: string | undefined;
|
|
62
|
+
sslDisabledProtocols?: string | undefined;
|
|
63
|
+
apiVersion?: string | undefined;
|
|
64
|
+
authenticationDatabase?: string | undefined;
|
|
65
|
+
authenticationMechanism?: string | undefined;
|
|
66
|
+
awsAccessKeyId?: string | undefined;
|
|
67
|
+
awsIamSessionToken?: string | undefined;
|
|
68
|
+
awsSecretAccessKey?: string | undefined;
|
|
69
|
+
awsSessionToken?: string | undefined;
|
|
70
|
+
csfleLibraryPath?: string | undefined;
|
|
71
|
+
cryptSharedLibPath?: string | undefined;
|
|
72
|
+
deepInspect?: boolean | undefined;
|
|
73
|
+
db?: string | undefined;
|
|
74
|
+
gssapiServiceName?: string | undefined;
|
|
75
|
+
sspiHostnameCanonicalization?: string | undefined;
|
|
76
|
+
sspiRealmOverride?: string | undefined;
|
|
77
|
+
jsContext?: "repl" | "plain-vm" | "auto" | undefined;
|
|
78
|
+
host?: string | undefined;
|
|
79
|
+
keyVaultNamespace?: string | undefined;
|
|
80
|
+
kmsURL?: string | undefined;
|
|
81
|
+
locale?: string | undefined;
|
|
82
|
+
oidcFlows?: string | undefined;
|
|
83
|
+
oidcRedirectUri?: string | undefined;
|
|
84
|
+
password?: string | undefined;
|
|
85
|
+
port?: string | undefined;
|
|
86
|
+
username?: string | undefined;
|
|
87
|
+
tlsCAFile?: string | undefined;
|
|
88
|
+
tlsCertificateKeyFile?: string | undefined;
|
|
89
|
+
tlsCertificateKeyFilePassword?: string | undefined;
|
|
90
|
+
tlsCertificateSelector?: string | undefined;
|
|
91
|
+
tlsCRLFile?: string | undefined;
|
|
92
|
+
tlsDisabledProtocols?: string | undefined;
|
|
93
|
+
apiDeprecationErrors?: boolean | undefined;
|
|
94
|
+
apiStrict?: boolean | undefined;
|
|
95
|
+
buildInfo?: boolean | undefined;
|
|
96
|
+
exposeAsyncRewriter?: boolean | undefined;
|
|
97
|
+
help?: boolean | undefined;
|
|
98
|
+
ipv6?: boolean | undefined;
|
|
99
|
+
nodb?: boolean | undefined;
|
|
100
|
+
norc?: boolean | undefined;
|
|
101
|
+
oidcTrustedEndpoint?: boolean | undefined;
|
|
102
|
+
oidcIdTokenAsAccessToken?: boolean | undefined;
|
|
103
|
+
oidcNoNonce?: boolean | undefined;
|
|
104
|
+
quiet?: boolean | undefined;
|
|
105
|
+
retryWrites?: boolean | undefined;
|
|
106
|
+
shell?: boolean | undefined;
|
|
107
|
+
skipStartupWarnings?: boolean | undefined;
|
|
108
|
+
verbose?: boolean | undefined;
|
|
109
|
+
version?: boolean | undefined;
|
|
110
|
+
smokeTests?: boolean | undefined;
|
|
111
|
+
perfTests?: boolean | undefined;
|
|
112
|
+
tls?: boolean | undefined;
|
|
113
|
+
tlsAllowInvalidCertificates?: boolean | undefined;
|
|
114
|
+
tlsAllowInvalidHostnames?: boolean | undefined;
|
|
115
|
+
tlsFIPSMode?: boolean | undefined;
|
|
116
|
+
tlsUseSystemCA?: boolean | undefined;
|
|
117
|
+
eval?: string[] | undefined;
|
|
118
|
+
file?: string[] | undefined;
|
|
119
|
+
json?: boolean | "relaxed" | "canonical" | undefined;
|
|
120
|
+
oidcDumpTokens?: boolean | "redacted" | "include-secrets" | undefined;
|
|
121
|
+
browser?: string | false | undefined;
|
|
122
|
+
}, unknown>>;
|
|
123
|
+
}
|
|
@@ -0,0 +1,54 @@
import { StreamableHttpRunner } from 'mongodb-mcp-server';
import { createScopedConnectionManagerFactory } from './scopedConnectionManager.js';
/**
 * Extends StreamableHttpRunner to support per-agent collection scoping.
 *
 * Each Weavy agent connects with a URL like `/mcp?agent=marketing-agent`.
 * The agent ID is extracted from the query parameter and used to look up
 * which collections that agent is allowed to access (from AGENT_SCOPES env var).
 */
export class GainableHttpRunner extends StreamableHttpRunner {
    baseConfig;
    constructor(config, scopingConfig) {
        super(config);
        this.baseConfig = scopingConfig;
    }
    async createServerForRequest({ request, serverOptions, sessionOptions, }) {
        const agentId = extractAgentId(request);
        console.log(`[MCP Session] agent=${agentId ?? '(none)'} | scoped=${agentId && this.baseConfig.agentScopes[agentId] ? 'yes' : 'no'}`);
        // An unknown agent id (or none) yields undefined = unrestricted access.
        const agentCollections = agentId
            ? this.baseConfig.agentScopes[agentId]
            : undefined;
        // Per-agent scoping config for this session only.
        const agentConfig = { ...this.baseConfig, agentCollections };
        // Each session gets its own connection manager carrying its scope.
        const makeManager = createScopedConnectionManagerFactory(agentConfig);
        const connectionManager = await makeManager({
            logger: this.logger,
            deviceId: this.deviceId,
            userConfig: this.userConfig,
        });
        return this.createServer({
            userConfig: this.userConfig,
            logger: undefined,
            serverOptions: { tools: this.tools, ...serverOptions },
            sessionOptions: { ...sessionOptions, connectionManager },
        });
    }
}
/**
 * Pull the agent id from the request's `?agent=` query parameter.
 *
 * Query-string parsers may deliver the value as a string, an array
 * (repeated parameter, e.g. `?agent=a&agent=b`), or nested objects.
 * Downstream code uses the result as a string key into AGENT_SCOPES,
 * so only string values are accepted; for arrays we take the first
 * element that is actually a string (the original code returned
 * `agent[0]` unconditionally, which could leak a non-string).
 *
 * @returns the agent id, or undefined when absent / not a usable string.
 */
function extractAgentId(request) {
    const agent = request?.query?.agent;
    if (typeof agent === 'string')
        return agent;
    if (Array.isArray(agent))
        return agent.find(v => typeof v === 'string');
    return undefined;
}
|
package/dist/index.d.ts
ADDED
|
@@ -0,0 +1 @@
|
|
|
1
|
+
import 'dotenv/config';
|
package/dist/index.js
ADDED
|
@@ -0,0 +1,49 @@
import 'dotenv/config';
import { UserConfigSchema } from 'mongodb-mcp-server';
import { FindTool, AggregateTool, CountTool, ExportTool, InsertManyTool, UpdateManyTool, DeleteManyTool, ListCollectionsTool, CollectionSchemaTool, CollectionIndexesTool, ExplainTool, } from 'mongodb-mcp-server/tools';
import { loadConfig } from './config.js';
import { GainableHttpRunner } from './gainableHttpRunner.js';
import { unwrapTools, setToolContext } from './stripUntrustedTags.js';
import { initSchemaInference, preWarmSchemaCache } from './collectionSchema.js';

// Entry point: load config, warm the schema cache, then start the HTTP runner.
const config = loadConfig();

// Initialize schema inference connection (used for validation + response hints)
await initSchemaInference(config.mongodbUri, config.accountId);
await preWarmSchemaCache(config.appName, config.allowedDatasets);

// Set tool context so response wrappers can resolve collection names
setToolContext(config.appName, config.allowedDatasets);

// Destructive/admin tools are disabled outright; everything else is scoped.
const userConfig = UserConfigSchema.parse({
    transport: 'http',
    httpPort: parseInt(process.env.HTTP_PORT || '3099', 10),
    httpHost: '127.0.0.1',
    connectionString: config.mongodbUri,
    telemetry: 'disabled',
    disabledTools: [
        'drop-database', 'drop-collection', 'create-collection',
        'list-databases', 'rename-collection', 'drop-index',
    ],
});

// The exposed tool set, wrapped to strip untrusted tags from responses.
const tools = unwrapTools([
    FindTool,
    AggregateTool,
    CountTool,
    ExportTool,
    InsertManyTool,
    UpdateManyTool,
    DeleteManyTool,
    ListCollectionsTool,
    CollectionSchemaTool,
    CollectionIndexesTool,
    ExplainTool,
]);

const runner = new GainableHttpRunner({ userConfig, tools }, config);
await runner.start({
    serverOptions: {
        toolContext: {
            appName: config.appName,
            accountId: config.accountId,
            allowedDatasets: config.allowedDatasets,
        },
    },
});
console.log(`Gainable MCP Server running on http://127.0.0.1:${process.env.HTTP_PORT || '3099'}/mcp`);
console.log(`App: ${config.appName} | DB: ${config.accountId}`);
|
@@ -0,0 +1,7 @@
|
|
|
1
|
+
import { type ConnectionManagerFactoryFn } from 'mongodb-mcp-server';
|
|
2
|
+
import type { ScopingConfig } from './config.js';
|
|
3
|
+
/**
|
|
4
|
+
* Factory function that creates a ScopedConnectionManager wrapping the default one.
|
|
5
|
+
* Pass this to StdioRunner via sessionOptions.connectionManager.
|
|
6
|
+
*/
|
|
7
|
+
export declare function createScopedConnectionManagerFactory(scopingConfig: ScopingConfig): ConnectionManagerFactoryFn;
|
|
@@ -0,0 +1,57 @@
import { ConnectionManager, ConnectionStateConnected, defaultCreateConnectionManager, } from 'mongodb-mcp-server';
import { createScopedProvider } from './scopedProvider.js';
/**
 * A ConnectionManager that wraps the default one and replaces the
 * serviceProvider with a scoped proxy on successful connection.
 *
 * This ensures ALL tools that call ensureConnected() get the scoped
 * provider — collection resolution, filter injection, and pipeline
 * sanitization happen transparently.
 */
class ScopedConnectionManager extends ConnectionManager {
    inner;
    scopingConfig;
    constructor(inner, scopingConfig) {
        super();
        this.inner = inner;
        this.scopingConfig = scopingConfig;
        // Mirror the inner manager's lifecycle events on this manager,
        // substituting the scoped provider whenever a connection succeeds.
        this.inner.events.on('connection-success', (state) => {
            const provider = createScopedProvider(state.serviceProvider, this.scopingConfig);
            this.changeState('connection-success', new ConnectionStateConnected(provider, state.connectionStringInfo, state.connectedAtlasCluster));
        });
        this.inner.events.on('connection-error', (state) => {
            this.changeState('connection-error', state);
        });
        this.inner.events.on('connection-close', (state) => {
            this.changeState('connection-close', state);
        });
    }
    async connect(settings) {
        const result = await this.inner.connect(settings);
        // Non-connected results (errors etc.) pass through untouched.
        if (result.tag !== 'connected') {
            return result;
        }
        // Wrap the freshly connected provider before exposing it.
        const provider = createScopedProvider(result.serviceProvider, this.scopingConfig);
        return this.changeState('connection-success', new ConnectionStateConnected(provider, result.connectionStringInfo, result.connectedAtlasCluster));
    }
    async disconnect() {
        return this.inner.disconnect();
    }
    async close() {
        return this.inner.close();
    }
}
/**
 * Factory function that creates a ScopedConnectionManager wrapping the default one.
 * Pass this to StdioRunner via sessionOptions.connectionManager.
 */
export function createScopedConnectionManagerFactory(scopingConfig) {
    return async ({ logger, deviceId, userConfig }) => {
        const inner = await defaultCreateConnectionManager({ logger, deviceId, userConfig });
        return new ScopedConnectionManager(inner, scopingConfig);
    };
}
|
|
@@ -0,0 +1,130 @@
|
|
|
1
|
+
import { resolveCollection, injectScope, injectScopeIntoDoc, sanitizePipeline, CORE_EXPOSED, } from './scoping/index.js';
|
|
2
|
+
import { getCollectionSchema, getCachedSchema, invalidateSchema } from './collectionSchema.js';
|
|
3
|
+
import { validateDocument, validateUpdate, validateFilter } from './writeValidation.js';
|
|
4
|
+
/**
 * Wrap a raw MongoDB provider in a Proxy that enforces per-app scoping.
 *
 * Every intercepted method: (1) resolves the agent-facing collection name
 * through resolveCollection (throwing for blocked names), (2) injects the
 * tenant scope into filters/pipelines, (3) validates reads/writes against
 * the inferred collection schema, and (4) rejects writes to read-only
 * 'data' collections. The caller-supplied database argument is ignored —
 * every call is routed to `accountId` instead.
 */
export function createScopedProvider(realProvider, config) {
    const { appName, accountId, allowedDatasets, agentCollections } = config;
    // Map an agent-facing name onto the real collection; throws when blocked.
    const resolve = (collection) => resolveCollection(collection, appName, allowedDatasets, agentCollections);
    return new Proxy(realProvider, {
        get(target, prop) {
            const original = target[prop];
            // Non-method properties pass through untouched.
            if (typeof original !== 'function')
                return original;
            switch (prop) {
                case 'find':
                    return (_database, collection, filter, options) => {
                        const resolved = resolve(collection);
                        // Validate filter field names against cached schema (sync — find must return a cursor, not a promise)
                        const findSchema = getCachedSchema(resolved.realName);
                        if (findSchema && filter)
                            validateFilter(filter, findSchema);
                        // Strip caller-supplied scope keys and append the real ones.
                        const scopedFilter = injectScope(filter || {}, resolved.type, appName, accountId);
                        return target.find(accountId, resolved.realName, scopedFilter, options);
                    };
                case 'aggregate':
                    return (_database, collection, pipeline, options) => {
                        const resolved = resolve(collection);
                        // Prepends the scope $match and rejects blocked stages / foreign $lookup targets.
                        const sanitized = sanitizePipeline(pipeline, resolved.type, appName, accountId, allowedDatasets);
                        return target.aggregate(accountId, resolved.realName, sanitized, options);
                    };
                case 'insertMany':
                    return async (_database, collection, documents) => {
                        const resolved = resolve(collection);
                        if (resolved.type === 'data')
                            throw new Error('Data collections are read-only');
                        // Validate against inferred schema
                        const insertSchema = await getCollectionSchema(resolved.realName, collection);
                        if (insertSchema) {
                            for (const doc of documents)
                                validateDocument(doc, insertSchema);
                        }
                        // Stamp each document with its scope field(s) before writing.
                        const scopedDocs = documents.map(doc => injectScopeIntoDoc(doc, resolved.type, appName));
                        const result = await target.insertMany(accountId, resolved.realName, scopedDocs);
                        // The write may have changed the collection's inferred shape —
                        // drop the cached schema so the next read re-samples it.
                        invalidateSchema(resolved.realName);
                        return result;
                    };
                case 'updateMany':
                    return async (_database, collection, filter, update, options) => {
                        const resolved = resolve(collection);
                        if (resolved.type === 'data')
                            throw new Error('Data collections are read-only');
                        // Validate against inferred schema
                        const updateSchema = await getCollectionSchema(resolved.realName, collection);
                        if (updateSchema)
                            validateUpdate(update, updateSchema);
                        const scopedFilter = injectScope(filter || {}, resolved.type, appName, accountId);
                        const result = await target.updateMany(accountId, resolved.realName, scopedFilter, update, options);
                        // Invalidate after a write, same as insertMany.
                        invalidateSchema(resolved.realName);
                        return result;
                    };
                case 'deleteMany':
                    return (_database, collection, filter) => {
                        const resolved = resolve(collection);
                        if (resolved.type === 'data')
                            throw new Error('Data collections are read-only');
                        const scopedFilter = injectScope(filter || {}, resolved.type, appName, accountId);
                        return target.deleteMany(accountId, resolved.realName, scopedFilter);
                    };
                case 'countDocuments':
                    return (_database, collection, query, options) => {
                        const resolved = resolve(collection);
                        // Validate query field names against cached schema
                        const countSchema = getCachedSchema(resolved.realName);
                        if (countSchema && query)
                            validateFilter(query, countSchema);
                        const scopedQuery = injectScope(query || {}, resolved.type, appName, accountId);
                        return target.countDocuments(accountId, resolved.realName, scopedQuery, options);
                    };
                case 'listCollections':
                    return async (_database, filter, options) => {
                        const collections = await target.listCollections(accountId, filter, options);
                        // Hide blocked collections and present clean (unprefixed) names.
                        return filterAndRenameCollections(collections, appName, allowedDatasets, agentCollections);
                    };
                case 'listDatabases':
                    return () => {
                        // Database enumeration would leak other tenants' databases.
                        throw new Error('listDatabases is not available in scoped mode');
                    };
                default:
                    // Any other method runs unmodified, bound to the real provider.
                    // NOTE(review): such methods receive the caller's raw arguments,
                    // including the original collection name — confirm no unscoped
                    // method on the provider can reach tenant data.
                    return original.bind(target);
            }
        },
    });
}
|
|
92
|
+
/**
 * Project a raw listCollections result into the agent-facing view:
 * exposed core collections keep their names, app-prefixed custom collections
 * are renamed to their clean (unprefixed) form, allowed data collections are
 * renamed to their dataset id, and everything else is hidden. When the agent
 * has a collection allowlist, the result is additionally filtered to it.
 */
function filterAndRenameCollections(collections, appName, allowedDatasets, agentCollections) {
    const prefix = `${appName}_`;
    // Classify one collection entry; null means "hidden from the agent".
    const classify = (col) => {
        const { name } = col;
        if (CORE_EXPOSED.includes(name))
            return { ...col, type: 'core', writable: true };
        if (name.startsWith(prefix))
            return { ...col, name: name.slice(prefix.length), type: 'custom', writable: true };
        const datasetId = allowedDatasets.find(id => name === `data_${id}`);
        if (datasetId !== undefined)
            return { ...col, name: datasetId, type: 'data', writable: false };
        return null;
    };
    const visible = collections.map(classify).filter(entry => entry !== null);
    // Agent-level restriction: keep only explicitly allowed collections.
    return agentCollections
        ? visible.filter(entry => agentCollections.includes(entry.name))
        : visible;
}
|
|
@@ -0,0 +1,4 @@
|
|
|
1
|
+
export { resolveCollection, CORE_EXPOSED } from './resolveCollection.js';
|
|
2
|
+
export type { ResolvedCollection, CollectionType } from './resolveCollection.js';
|
|
3
|
+
export { injectScope, injectScopeIntoDoc, stripScopeFields } from './injectScope.js';
|
|
4
|
+
export { sanitizePipeline } from './sanitizePipeline.js';
|
|
@@ -0,0 +1,4 @@
|
|
|
1
|
+
import type { CollectionType } from './resolveCollection.js';
/**
 * Return a copy of `filter` with the tenant scope applied: 'core' collections
 * are pinned to `appId`, 'data' collections to `accountId`, 'custom'
 * collections pass through. Caller-supplied scope keys are stripped first and
 * string equality values are converted to case-insensitive matches.
 */
export declare function injectScope(filter: Record<string, any>, type: CollectionType, appName: string, accountId: string): Record<string, any>;
/**
 * Remove caller-supplied `appId`/`accountId` keys from a filter (recursing
 * into $and/$or/$nor branches) so agents cannot override the injected scope.
 */
export declare function stripScopeFields(filter: any): any;
/**
 * Stamp a document being written with its scope field: 'core' documents get
 * `appId`; all other types are returned unchanged.
 */
export declare function injectScopeIntoDoc(doc: Record<string, any>, type: CollectionType, appName: string): Record<string, any>;
|
|
@@ -0,0 +1,77 @@
|
|
|
1
|
+
/**
 * Apply the tenant-scope constraint to a query filter.
 *
 * Caller-supplied scope fields are stripped first (agents must not be able
 * to override them), string equality values are made case-insensitive, and
 * then the scope key for the collection type is appended:
 *   - 'core'   → pinned to this app via appId
 *   - 'data'   → pinned to this tenant via accountId
 *   - 'custom' → already physically isolated by the app prefix; no extra key
 *
 * @throws Error on an unrecognized collection type. Previously the switch
 *         fell through and returned undefined, which callers then passed to
 *         MongoDB as an unfiltered (match-all) query — failing open.
 */
export function injectScope(filter, type, appName, accountId) {
    const clean = makeCaseInsensitive(stripScopeFields(filter));
    switch (type) {
        case 'core':
            return { ...clean, appId: appName };
        case 'data':
            return { ...clean, accountId: accountId };
        case 'custom':
            return clean;
        default:
            // Fail closed: an unknown type must never yield an unscoped filter.
            throw new Error(`Unknown collection type: ${String(type)}`);
    }
}
|
|
12
|
+
/**
 * Remove caller-supplied scope keys (`appId`, `accountId`) from a filter so
 * an agent cannot override the scope injected by injectScope. Recurses into
 * the array-valued logical operators; non-object inputs pass through as-is.
 */
export function stripScopeFields(filter) {
    if (!filter || typeof filter !== 'object')
        return filter;
    const cleaned = { ...filter };
    delete cleaned.appId;
    delete cleaned.accountId;
    // Logical operators carry arrays of sub-filters — strip each branch too.
    for (const op of ['$and', '$or', '$nor']) {
        const branches = cleaned[op];
        if (Array.isArray(branches))
            cleaned[op] = branches.map(branch => stripScopeFields(branch));
    }
    return cleaned;
}
|
|
25
|
+
/**
|
|
26
|
+
* Convert string equality values in a filter to case-insensitive regex.
|
|
27
|
+
* e.g. { status: "won" } → { status: { $regex: "^won$", $options: "i" } }
|
|
28
|
+
*/
|
|
29
|
+
function makeCaseInsensitive(filter) {
|
|
30
|
+
if (!filter || typeof filter !== 'object')
|
|
31
|
+
return filter;
|
|
32
|
+
const result = {};
|
|
33
|
+
for (const [key, value] of Object.entries(filter)) {
|
|
34
|
+
if (key.startsWith('$')) {
|
|
35
|
+
// Recurse into $and/$or/$nor arrays
|
|
36
|
+
if (Array.isArray(value)) {
|
|
37
|
+
result[key] = value.map(makeCaseInsensitive);
|
|
38
|
+
}
|
|
39
|
+
else if (key === '$in' && Array.isArray(value)) {
|
|
40
|
+
result[key] = value;
|
|
41
|
+
}
|
|
42
|
+
else {
|
|
43
|
+
result[key] = value;
|
|
44
|
+
}
|
|
45
|
+
}
|
|
46
|
+
else if (typeof value === 'string') {
|
|
47
|
+
// Escape regex special chars and wrap in anchored case-insensitive match
|
|
48
|
+
const escaped = value.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
|
|
49
|
+
result[key] = { $regex: `^${escaped}$`, $options: 'i' };
|
|
50
|
+
}
|
|
51
|
+
else if (typeof value === 'object' && value !== null && !Array.isArray(value)) {
|
|
52
|
+
// Handle operator objects like { $in: ["won", "lost"] }
|
|
53
|
+
const obj = value;
|
|
54
|
+
if (obj.$in && Array.isArray(obj.$in)) {
|
|
55
|
+
result[key] = {
|
|
56
|
+
...obj,
|
|
57
|
+
$in: obj.$in.map((v) => typeof v === 'string' ? new RegExp(`^${v.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')}$`, 'i') : v),
|
|
58
|
+
};
|
|
59
|
+
}
|
|
60
|
+
else {
|
|
61
|
+
result[key] = value;
|
|
62
|
+
}
|
|
63
|
+
}
|
|
64
|
+
else {
|
|
65
|
+
result[key] = value;
|
|
66
|
+
}
|
|
67
|
+
}
|
|
68
|
+
return result;
|
|
69
|
+
}
|
|
70
|
+
/**
 * Stamp a document being written with its scope field. Only 'core'
 * collection documents need one (appId); every other type is returned
 * unchanged — custom collections are isolated by their name prefix.
 */
export function injectScopeIntoDoc(doc, type, appName) {
    return type === 'core' ? { ...doc, appId: appName } : doc;
}
|
|
@@ -0,0 +1,8 @@
|
|
|
1
|
+
/** Core collections exposed to agents under their real (unprefixed) names. */
export declare const CORE_EXPOSED: readonly ["users", "userfieldmetadatas"];
/** Access class of a collection: shared core, app-private custom, or read-only data. */
export type CollectionType = 'core' | 'custom' | 'data';
/** Result of mapping an agent-facing collection name onto the real collection. */
export interface ResolvedCollection {
    /** Actual MongoDB collection name (may carry an app or `data_` prefix). */
    realName: string;
    /** Which scoping rules apply to this collection. */
    type: CollectionType;
    /** False for 'data' collections, which are read-only. */
    writable: boolean;
}
/**
 * Resolve an agent-facing collection name, enforcing per-app access rules.
 * @throws Error when the collection is blocked or outside the agent's allowlist.
 */
export declare function resolveCollection(cleanName: string, appName: string, allowedDatasets: string[], agentCollections?: string[]): ResolvedCollection;
|
|
@@ -0,0 +1,26 @@
|
|
|
1
|
+
/** Core collections exposed to agents under their real names. */
export const CORE_EXPOSED = ['users', 'userfieldmetadatas'];
/** System collections that are never reachable from an agent. */
const BLOCKED = ['agents', 'connections', 'emailtemplates', 'emaillogs', 'datamodels'];
/**
 * Map an agent-facing collection name onto the real MongoDB collection,
 * enforcing access rules in priority order: agent allowlist, exposed core,
 * allowed datasets, blocked system collections, then app-prefixed custom.
 * Throws for anything the agent may not touch.
 */
export function resolveCollection(cleanName, appName, allowedDatasets, agentCollections) {
    const denied = () => new Error(`Collection '${cleanName}' is not accessible`);
    // 0. Agent-level allowlist takes precedence over everything else.
    if (agentCollections && !agentCollections.includes(cleanName))
        throw denied();
    // 1. Exposed core collections keep their real name.
    if (CORE_EXPOSED.includes(cleanName))
        return { realName: cleanName, type: 'core', writable: true };
    // 2. Read-only datasets may be addressed by dataset id or real name.
    const dataset = allowedDatasets.find(id => cleanName === id || cleanName === `data_${id}`);
    if (dataset !== undefined)
        return { realName: `data_${dataset}`, type: 'data', writable: false };
    // 3. System collections are always off-limits.
    if (BLOCKED.includes(cleanName))
        throw denied();
    // 4. Everything else is an app-private custom collection behind the prefix.
    return { realName: `${appName}_${cleanName}`, type: 'custom', writable: true };
}
|
|
@@ -0,0 +1,30 @@
|
|
|
1
|
+
import { resolveCollection } from './resolveCollection.js';
|
|
2
|
+
/** Operators that can write outside the scope or execute arbitrary JS. */
const BLOCKED_STAGES = ['$out', '$merge', '$where', '$function'];
/**
 * Prepend the tenant-scope $match and reject dangerous pipeline content.
 *
 * Security fix: the original only checked the TOP-LEVEL key of each stage,
 * so a blocked operator nested inside a stage — e.g.
 * `{ $match: { $where: "…" } }` or `$function` inside an expression —
 * bypassed the check entirely. Blocked operators are now rejected wherever
 * they appear in a stage.
 *
 * @throws Error when a blocked operator appears anywhere in the pipeline,
 *         or a $lookup targets a collection outside the app's scope.
 */
export function sanitizePipeline(pipeline, type, appName, accountId, allowedDatasets) {
    // 1. Prepend the scope $match for scoped collection types.
    const scopeMatch = type === 'core'
        ? { $match: { appId: appName } }
        : type === 'data'
            ? { $match: { accountId } }
            : null;
    const sanitized = scopeMatch ? [scopeMatch, ...pipeline] : [...pipeline];
    for (const stage of sanitized) {
        // 2. Reject blocked operators at any nesting depth.
        assertNoBlockedOperators(stage);
        // 3. $lookup may only target collections the app can resolve.
        if ('$lookup' in stage) {
            const from = stage.$lookup.from;
            try {
                resolveCollection(from, appName, allowedDatasets);
            }
            catch {
                throw new Error(`$lookup to collection '${from}' is not allowed`);
            }
        }
    }
    return sanitized;
}
/**
 * Recursively scan a pipeline stage (objects and arrays) for blocked
 * operators; throws on the first one found.
 */
function assertNoBlockedOperators(node) {
    if (Array.isArray(node)) {
        for (const item of node)
            assertNoBlockedOperators(item);
        return;
    }
    if (!node || typeof node !== 'object')
        return;
    for (const [key, value] of Object.entries(node)) {
        if (BLOCKED_STAGES.includes(key))
            throw new Error(`${key} is not allowed`);
        assertNoBlockedOperators(value);
    }
}
|
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
import type { ToolClass } from 'mongodb-mcp-server/tools';
/**
 * Record the app name and allowed dataset ids at startup so wrapped tools
 * can resolve collection names when appending schema hints to responses.
 */
export declare function setToolContext(appName: string, allowedDatasets: string[]): void;
/**
 * Strips the `<untrusted-user-data-UUID>` wrapper and security warnings
 * from tool response text content.
 *
 * The official mongodb-mcp-server wraps all query results in these tags
 * as a prompt injection defense. In our case the data is already scoped
 * and the copilot (Weavy/OpenAI) needs to read the actual values —
 * the tags cause models to ignore or hallucinate over the real data.
 *
 * This creates a new tool class that extends the original and post-processes
 * the invoke() result to remove the wrapping.
 */
export declare function unwrapTool<T extends ToolClass>(ToolCtor: T): T;
/**
 * Convenience: wrap an array of tool classes.
 */
export declare function unwrapTools<T extends ToolClass>(tools: T[]): T[];
|
|
@@ -0,0 +1,96 @@
|
|
|
1
|
+
import { getCollectionSchema, formatSchemaHint } from './collectionSchema.js';
|
|
2
|
+
import { resolveCollection } from './scoping/index.js';
|
|
3
|
+
/** Set by index.ts at startup so tool wrappers can resolve collection names. */
let _appName = '';
// Dataset ids the current app may read; consumed together with _appName when
// resolving collection names for the schema-hint step in unwrapTool.
let _allowedDatasets = [];
/**
 * Record the per-app context. Must be called before any wrapped tool runs —
 * while _appName is still '' the schema-hint step in unwrapTool is skipped.
 */
export function setToolContext(appName, allowedDatasets) {
    _appName = appName;
    _allowedDatasets = allowedDatasets;
}
|
|
10
|
+
/**
 * Strips the `<untrusted-user-data-UUID>` wrapper and security warnings
 * from tool response text content.
 *
 * The official mongodb-mcp-server wraps all query results in these tags
 * as a prompt injection defense. In our case the data is already scoped
 * and the copilot (Weavy/OpenAI) needs to read the actual values —
 * the tags cause models to ignore or hallucinate over the real data.
 *
 * This creates a new tool class that extends the original and post-processes
 * the invoke() result to remove the wrapping.
 */
export function unwrapTool(ToolCtor) {
    // Create a subclass that overrides invoke to strip tags
    const Wrapped = class extends ToolCtor {
        async invoke(...args) {
            let result;
            try {
                result = await super.invoke(...args);
            }
            catch (err) {
                // Return validation/scoping errors as tool responses instead of crashing
                return { content: [{ type: 'text', text: err?.message || String(err) }] };
            }
            if (result?.content && Array.isArray(result.content)) {
                // Strip untrusted wrappers and collect all text into one item.
                // Weavy/OpenAI only reads the first content item, so we must
                // merge everything into a single text entry.
                const textParts = [];
                const nonTextItems = [];
                for (const item of result.content) {
                    if (item?.type === 'text' && typeof item.text === 'string') {
                        const cleaned = stripUntrustedWrapper(item.text);
                        // Drop items whose text became empty after unwrapping.
                        if (cleaned)
                            textParts.push(cleaned);
                    }
                    else {
                        nonTextItems.push(item);
                    }
                }
                // Append schema hint for the target collection so the agent
                // knows the correct field names, types, and valid values.
                // Requires setToolContext() to have populated _appName.
                const collection = args[0]?.collection;
                if (collection && _appName) {
                    try {
                        const resolved = resolveCollection(collection, _appName, _allowedDatasets);
                        const schema = await getCollectionSchema(resolved.realName, collection);
                        if (schema) {
                            textParts.push(formatSchemaHint(schema));
                        }
                    }
                    catch {
                        // Schema hints are best-effort, don't break the response
                    }
                }
                // Single merged text item first, then any non-text items.
                result.content = [
                    ...(textParts.length > 0
                        ? [{ type: 'text', text: textParts.join('\n\n') }]
                        : []),
                    ...nonTextItems,
                ];
            }
            return result;
        }
    };
    // Preserve static properties that the server uses for registration
    // NOTE(review): `class extends` already inherits statics via the prototype
    // chain; the explicit copy makes them *own* properties in case the server
    // reads them with hasOwnProperty — confirm whether it is required.
    Wrapped.toolName = ToolCtor.toolName;
    Wrapped.category = ToolCtor.category;
    Wrapped.operationType = ToolCtor.operationType;
    return Wrapped;
}
|
|
81
|
+
/**
 * Remove the `<untrusted-user-data-…>` wrapper from a text string, returning
 * the inner payload trimmed. Text without a recognizable wrapper (or with an
 * empty payload) is returned unchanged.
 */
function stripUntrustedWrapper(text) {
    const wrapper = /^[ \t]*<untrusted-user-data-[0-9a-f\-]*>(?<data>.*)^[ \t]*<\/untrusted-user-data-[0-9a-f\-]*>/gms;
    const inner = wrapper.exec(text)?.groups?.data;
    return inner ? inner.trim() : text;
}
|
|
91
|
+
/**
 * Convenience: apply unwrapTool to every tool class in a list.
 */
export function unwrapTools(tools) {
    const wrapped = [];
    for (const tool of tools)
        wrapped.push(unwrapTool(tool));
    return wrapped;
}
|
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
import type { CollectionSchemaInfo } from './collectionSchema.js';
/**
 * Generic write validation based on inferred collection schema.
 * Throws descriptive errors so the AI agent can self-correct.
 */
/**
 * Validate filter/query fields against the inferred schema.
 * Rejects queries using non-existent field names so the agent self-corrects
 * instead of returning 0 results silently.
 * @throws Error naming the offending field and listing the known field names.
 */
export declare function validateFilter(filter: Record<string, any>, schema: CollectionSchemaInfo): void;
/**
 * Validate a document being inserted against the inferred schema.
 * Checks field naming, enum-like values, and ObjectId reference fields.
 * @throws Error describing the first violation found.
 */
export declare function validateDocument(doc: Record<string, any>, schema: CollectionSchemaInfo): void;
/**
 * Validate fields in an update operation against the inferred schema.
 * Covers $set payloads, $unset keys (naming only), and operator-free
 * replacement-style updates.
 * @throws Error describing the first violation found.
 */
export declare function validateUpdate(update: Record<string, any>, schema: CollectionSchemaInfo): void;
|
|
@@ -0,0 +1,151 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Generic write validation based on inferred collection schema.
|
|
3
|
+
* Throws descriptive errors so the AI agent can self-correct.
|
|
4
|
+
*/
|
|
5
|
+
/**
 * Validate filter/query field names against the inferred schema.
 * Unknown fields raise a descriptive error — with a "did you mean" hint for
 * casing or naming-convention mistakes — so the agent self-corrects instead
 * of silently getting 0 results. MongoDB operators and internal/scope keys
 * are skipped.
 */
export function validateFilter(filter, schema) {
    const fieldNames = schema.fields.map(f => f.name);
    const known = new Set(fieldNames);
    const knownFieldNames = fieldNames.join(', ');
    const byLower = (candidate) => schema.fields.find(f => f.name.toLowerCase() === candidate.toLowerCase());
    for (const key of Object.keys(filter)) {
        // Skip operators, internal fields, and the scope field.
        if (key.startsWith('$') || key === '_id' || key === '__v' || key === 'app')
            continue;
        if (known.has(key))
            continue;
        // Suggest the right name: exact-but-wrong casing first, then
        // snake_case → camelCase, then camelCase → snake_case.
        let suggestion = byLower(key);
        if (!suggestion && key.includes('_'))
            suggestion = byLower(snakeToCamel(key));
        if (!suggestion && /[a-z][A-Z]/.test(key))
            suggestion = byLower(camelToSnake(key));
        if (suggestion) {
            throw new Error(`Unknown filter field "${key}". Did you mean "${suggestion.name}"? Known fields: ${knownFieldNames}. Use the collection-schema tool to discover correct field names before querying.`);
        }
        // Completely unknown field — reject outright.
        throw new Error(`Unknown filter field "${key}". This collection only has these fields: ${knownFieldNames}. Use the collection-schema tool to discover correct field names before querying.`);
    }
}
|
|
43
|
+
/**
 * Validate a document being inserted against the inferred schema.
 * Runs the naming, enum-value, and reference checks in order, throwing on
 * the first violation found.
 */
export function validateDocument(doc, schema) {
    for (const check of [validateFieldNaming, validateEnumValues, validateReferences])
        check(doc, schema);
}
|
|
51
|
+
/**
 * Validate fields in an update operation against the inferred schema.
 * $set payloads get the full check, $unset keys are checked for naming only,
 * and an update with no $ operators is treated as a replacement document.
 */
export function validateUpdate(update, schema) {
    const isObj = (v) => v && typeof v === 'object';
    const fullCheck = (fields) => {
        validateFieldNaming(fields, schema);
        validateEnumValues(fields, schema);
        validateReferences(fields, schema);
    };
    if (isObj(update.$set))
        fullCheck(update.$set);
    if (isObj(update.$unset))
        validateFieldNaming(update.$unset, schema);
    // Replacement-style updates (no $ operators)
    if (!Object.keys(update).some(k => k.startsWith('$')))
        fullCheck(update);
}
|
|
71
|
+
/**
 * Reject fields that violate the collection's naming convention — e.g.
 * snake_case fields in a camelCase collection, or wrong casing of an
 * existing field. Unknown names raise an error listing the real fields.
 */
function validateFieldNaming(fields, schema) {
    const fieldNames = schema.fields.map(f => f.name);
    const known = new Set(fieldNames);
    const knownFieldNames = fieldNames.join(', ');
    const byLower = (candidate) => schema.fields.find(f => f.name.toLowerCase() === candidate.toLowerCase());
    for (const key of Object.keys(fields)) {
        // Operators and Mongo-internal fields are not schema fields.
        if (key.startsWith('$') || key === '_id' || key === '__v' || known.has(key))
            continue;
        // Same name with different casing → demand the exact spelling.
        const caseMatch = byLower(key);
        if (caseMatch) {
            throw new Error(`Invalid field name "${key}". Use "${caseMatch.name}" instead (exact casing required). Known fields: ${knownFieldNames}.`);
        }
        // Naming-convention mismatch: snake_case → camelCase, then the reverse.
        let converted;
        if (key.includes('_'))
            converted = byLower(snakeToCamel(key));
        if (!converted && /[a-z][A-Z]/.test(key))
            converted = byLower(camelToSnake(key));
        if (converted) {
            throw new Error(`Invalid field name "${key}". Use "${converted.name}" instead. Known fields: ${knownFieldNames}.`);
        }
        // Completely unknown field — reject outright.
        throw new Error(`Unknown field "${key}". This collection only has these fields: ${knownFieldNames}. Use the collection-schema tool to check the correct field names before writing.`);
    }
}
|
|
107
|
+
/**
 * Enforce exact (case-sensitive) values for enum-like string fields.
 * Non-string values and fields without sampled enum values are ignored.
 */
function validateEnumValues(fields, schema) {
    for (const [key, value] of Object.entries(fields)) {
        if (typeof value !== 'string')
            continue;
        const info = schema.fields.find(f => f.name === key);
        const allowed = info?.enumValues;
        if (!allowed?.length || allowed.includes(value))
            continue;
        const formatted = allowed.map(v => `"${v}"`).join(', ');
        // Distinguish a casing slip from a completely invalid value.
        const caseMatch = allowed.find(v => v.toLowerCase() === value.toLowerCase());
        throw new Error(caseMatch
            ? `Invalid value "${value}" for field "${key}". Use "${caseMatch}" (exact casing required). Valid values: ${formatted}.`
            : `Invalid value "${value}" for field "${key}". Valid values: ${formatted}.`);
    }
}
|
|
129
|
+
/**
 * Ensure reference (ObjectId) fields receive 24-character hex id strings —
 * agents often supply display names instead of ids. Non-string values and
 * non-reference fields are ignored.
 */
function validateReferences(fields, schema) {
    const OBJECT_ID = /^[0-9a-f]{24}$/i;
    for (const [key, value] of Object.entries(fields)) {
        if (typeof value !== 'string')
            continue;
        const info = schema.fields.find(f => f.name === key);
        if (info?.isReference && !OBJECT_ID.test(value)) {
            throw new Error(`Field "${key}" is a reference (ObjectId). Got "${value}" which is not a valid ObjectId. ` +
                `Query the related collection first to find the correct _id value.`);
        }
    }
}
|
|
146
|
+
// Naming-convention converters used for the "did you mean" suggestions above.
// snake_case → camelCase: lift the letter that follows each underscore.
function snakeToCamel(s) {
    return s.replace(/_([a-z])/g, (_full, letter) => letter.toUpperCase());
}
// camelCase → snake_case: lower every capital and prefix it with "_".
function camelToSnake(s) {
    return s.replace(/[A-Z]/g, (upper) => `_${upper.toLowerCase()}`);
}
|
package/package.json
ADDED
|
@@ -0,0 +1,34 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@gainable.dev/mcp-server",
|
|
3
|
+
"version": "0.1.0",
|
|
4
|
+
"description": "Scoped MCP server for Gainable in-app copilot agents — wraps mongodb-mcp-server with per-app data isolation",
|
|
5
|
+
"type": "module",
|
|
6
|
+
"main": "dist/index.js",
|
|
7
|
+
"bin": {
|
|
8
|
+
"gainable-mcp-server": "dist/index.js"
|
|
9
|
+
},
|
|
10
|
+
"files": [
|
|
11
|
+
"dist"
|
|
12
|
+
],
|
|
13
|
+
"scripts": {
|
|
14
|
+
"build": "tsc",
|
|
15
|
+
"prepublishOnly": "npm run build",
|
|
16
|
+
"dev": "tsx src/index.ts",
|
|
17
|
+
"start": "node dist/index.js",
|
|
18
|
+
"test": "vitest run",
|
|
19
|
+
"test:watch": "vitest"
|
|
20
|
+
},
|
|
21
|
+
"publishConfig": {
|
|
22
|
+
"access": "restricted"
|
|
23
|
+
},
|
|
24
|
+
"dependencies": {
|
|
25
|
+
"dotenv": "^17.3.1",
|
|
26
|
+
"mongodb-mcp-server": "^1.8.0"
|
|
27
|
+
},
|
|
28
|
+
"devDependencies": {
|
|
29
|
+
"@types/node": "^22.0.0",
|
|
30
|
+
"tsx": "^4.19.0",
|
|
31
|
+
"typescript": "^5.7.0",
|
|
32
|
+
"vitest": "^3.0.0"
|
|
33
|
+
}
|
|
34
|
+
}
|