@theihtisham/mcp-server-firebase 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +362 -0
- package/dist/index.d.ts +3 -0
- package/dist/index.js +79 -0
- package/dist/services/firebase.d.ts +14 -0
- package/dist/services/firebase.js +163 -0
- package/dist/tools/auth.d.ts +3 -0
- package/dist/tools/auth.js +346 -0
- package/dist/tools/firestore.d.ts +3 -0
- package/dist/tools/firestore.js +802 -0
- package/dist/tools/functions.d.ts +3 -0
- package/dist/tools/functions.js +168 -0
- package/dist/tools/index.d.ts +10 -0
- package/dist/tools/index.js +30 -0
- package/dist/tools/messaging.d.ts +3 -0
- package/dist/tools/messaging.js +296 -0
- package/dist/tools/realtime-db.d.ts +4 -0
- package/dist/tools/realtime-db.js +271 -0
- package/dist/tools/storage.d.ts +3 -0
- package/dist/tools/storage.js +279 -0
- package/dist/tools/types.d.ts +11 -0
- package/dist/tools/types.js +3 -0
- package/dist/utils/cache.d.ts +16 -0
- package/dist/utils/cache.js +75 -0
- package/dist/utils/errors.d.ts +15 -0
- package/dist/utils/errors.js +94 -0
- package/dist/utils/index.d.ts +5 -0
- package/dist/utils/index.js +37 -0
- package/dist/utils/pagination.d.ts +28 -0
- package/dist/utils/pagination.js +75 -0
- package/dist/utils/validation.d.ts +22 -0
- package/dist/utils/validation.js +172 -0
- package/package.json +53 -0
- package/src/index.ts +94 -0
- package/src/services/firebase.ts +140 -0
- package/src/tools/auth.ts +375 -0
- package/src/tools/firestore.ts +931 -0
- package/src/tools/functions.ts +189 -0
- package/src/tools/index.ts +24 -0
- package/src/tools/messaging.ts +324 -0
- package/src/tools/realtime-db.ts +307 -0
- package/src/tools/storage.ts +314 -0
- package/src/tools/types.ts +10 -0
- package/src/utils/cache.ts +82 -0
- package/src/utils/errors.ts +110 -0
- package/src/utils/index.ts +4 -0
- package/src/utils/pagination.ts +105 -0
- package/src/utils/validation.ts +212 -0
- package/tests/cache.test.ts +139 -0
- package/tests/errors.test.ts +132 -0
- package/tests/firebase-service.test.ts +46 -0
- package/tests/pagination.test.ts +26 -0
- package/tests/tools.test.ts +226 -0
- package/tests/validation.test.ts +216 -0
- package/tsconfig.json +26 -0
- package/vitest.config.ts +15 -0
"use strict";
// Compiled CommonJS module: Firestore tool definitions for the MCP server.
// Each entry in `firestoreTools` is { name, description, inputSchema, handler }.
Object.defineProperty(exports, "__esModule", { value: true });
exports.firestoreTools = void 0;
const firebase_js_1 = require("../services/firebase.js");
const index_js_1 = require("../utils/index.js");
const firestore_1 = require("firebase-admin/firestore");
// ============================================================
// FIRESTORE TOOLS
// ============================================================
exports.firestoreTools = [
|
|
11
|
+
// ── firestore_query ──────────────────────────────────
|
|
12
|
+
{
|
|
13
|
+
name: 'firestore_query',
|
|
14
|
+
description: 'Query Firestore collections with filtering, ordering, and pagination. ' +
|
|
15
|
+
'Supports where clauses, order by, limit, and cursor-based pagination.',
|
|
16
|
+
inputSchema: {
|
|
17
|
+
type: 'object',
|
|
18
|
+
properties: {
|
|
19
|
+
collection: {
|
|
20
|
+
type: 'string',
|
|
21
|
+
description: 'Collection path (e.g., "users" or "users/uid1/orders")',
|
|
22
|
+
},
|
|
23
|
+
where: {
|
|
24
|
+
type: 'array',
|
|
25
|
+
items: {
|
|
26
|
+
type: 'object',
|
|
27
|
+
properties: {
|
|
28
|
+
field: { type: 'string' },
|
|
29
|
+
operator: { type: 'string', enum: ['==', '!=', '<', '<=', '>', '>=', 'array-contains', 'array-contains-any', 'in', 'not-in'] },
|
|
30
|
+
value: { description: 'Value to compare against' },
|
|
31
|
+
},
|
|
32
|
+
required: ['field', 'operator', 'value'],
|
|
33
|
+
},
|
|
34
|
+
description: 'Array of where clauses for filtering',
|
|
35
|
+
},
|
|
36
|
+
orderBy: {
|
|
37
|
+
type: 'array',
|
|
38
|
+
items: {
|
|
39
|
+
type: 'object',
|
|
40
|
+
properties: {
|
|
41
|
+
field: { type: 'string' },
|
|
42
|
+
direction: { type: 'string', enum: ['asc', 'desc'] },
|
|
43
|
+
},
|
|
44
|
+
required: ['field'],
|
|
45
|
+
},
|
|
46
|
+
description: 'Array of order-by specifications',
|
|
47
|
+
},
|
|
48
|
+
limit: { type: 'number', description: 'Maximum results (1-10000, default 100)' },
|
|
49
|
+
pageToken: { type: 'string', description: 'Pagination token from previous query result' },
|
|
50
|
+
select: {
|
|
51
|
+
type: 'array',
|
|
52
|
+
items: { type: 'string' },
|
|
53
|
+
description: 'Fields to include in the result (projection)',
|
|
54
|
+
},
|
|
55
|
+
},
|
|
56
|
+
required: ['collection'],
|
|
57
|
+
},
|
|
58
|
+
handler: async (args) => {
|
|
59
|
+
try {
|
|
60
|
+
const collectionPath = (0, index_js_1.validateCollectionPath)(args['collection']);
|
|
61
|
+
const limit = Math.min(args['limit'] || 100, 10000);
|
|
62
|
+
const pageToken = args['pageToken'];
|
|
63
|
+
const whereClauses = args['where'] || [];
|
|
64
|
+
const orderByClauses = args['orderBy'] || [];
|
|
65
|
+
const selectFields = args['select'];
|
|
66
|
+
const db = (0, firebase_js_1.getFirestore)();
|
|
67
|
+
let query = db.collection(collectionPath);
|
|
68
|
+
// Apply where clauses
|
|
69
|
+
for (const w of whereClauses) {
|
|
70
|
+
const field = (0, index_js_1.validateWhereField)(w.field);
|
|
71
|
+
const op = (0, index_js_1.validateOperator)(w.operator);
|
|
72
|
+
query = query.where(field, op, w.value);
|
|
73
|
+
}
|
|
74
|
+
// Apply orderBy
|
|
75
|
+
for (const o of orderByClauses) {
|
|
76
|
+
query = query.orderBy(o.field, (o.direction || 'asc'));
|
|
77
|
+
}
|
|
78
|
+
// Apply pagination
|
|
79
|
+
if (pageToken) {
|
|
80
|
+
const decodedPath = Buffer.from(pageToken, 'base64').toString('utf-8');
|
|
81
|
+
const lastDocSnap = await db.doc(decodedPath).get();
|
|
82
|
+
if (lastDocSnap.exists) {
|
|
83
|
+
query = query.startAfter(lastDocSnap);
|
|
84
|
+
}
|
|
85
|
+
}
|
|
86
|
+
query = query.limit(limit);
|
|
87
|
+
// Apply field selection
|
|
88
|
+
if (selectFields && selectFields.length > 0) {
|
|
89
|
+
query = query.select(...selectFields);
|
|
90
|
+
}
|
|
91
|
+
const snapshot = await query.get();
|
|
92
|
+
const docs = snapshot.docs.map((doc) => ({
|
|
93
|
+
id: doc.id,
|
|
94
|
+
path: doc.ref.path,
|
|
95
|
+
data: doc.data(),
|
|
96
|
+
}));
|
|
97
|
+
let nextPageToken;
|
|
98
|
+
if (snapshot.size === limit && snapshot.docs.length > 0) {
|
|
99
|
+
const lastDoc = snapshot.docs[snapshot.docs.length - 1];
|
|
100
|
+
nextPageToken = Buffer.from(lastDoc.ref.path).toString('base64');
|
|
101
|
+
}
|
|
102
|
+
return (0, index_js_1.formatListResult)(docs, nextPageToken);
|
|
103
|
+
}
|
|
104
|
+
catch (err) {
|
|
105
|
+
(0, index_js_1.handleFirebaseError)(err, 'firestore', 'query');
|
|
106
|
+
}
|
|
107
|
+
},
|
|
108
|
+
},
|
|
109
|
+
// ── firestore_get_document ────────────────────────────
|
|
110
|
+
{
|
|
111
|
+
name: 'firestore_get_document',
|
|
112
|
+
description: 'Get a single Firestore document by its full path.',
|
|
113
|
+
inputSchema: {
|
|
114
|
+
type: 'object',
|
|
115
|
+
properties: {
|
|
116
|
+
path: { type: 'string', description: 'Full document path (e.g., "users/uid123")' },
|
|
117
|
+
},
|
|
118
|
+
required: ['path'],
|
|
119
|
+
},
|
|
120
|
+
handler: async (args) => {
|
|
121
|
+
try {
|
|
122
|
+
const docPath = (0, index_js_1.validateDocumentPath)(args['path']);
|
|
123
|
+
const cacheKey = `doc:${docPath}`;
|
|
124
|
+
const cached = index_js_1.firestoreCache.get(cacheKey);
|
|
125
|
+
if (cached) {
|
|
126
|
+
return (0, index_js_1.formatSuccess)(cached);
|
|
127
|
+
}
|
|
128
|
+
const db = (0, firebase_js_1.getFirestore)();
|
|
129
|
+
const docSnap = await db.doc(docPath).get();
|
|
130
|
+
if (!docSnap.exists) {
|
|
131
|
+
return (0, index_js_1.formatSuccess)({
|
|
132
|
+
exists: false,
|
|
133
|
+
path: docPath,
|
|
134
|
+
message: `Document "${docPath}" does not exist.`,
|
|
135
|
+
});
|
|
136
|
+
}
|
|
137
|
+
const result = {
|
|
138
|
+
exists: true,
|
|
139
|
+
id: docSnap.id,
|
|
140
|
+
path: docSnap.ref.path,
|
|
141
|
+
data: docSnap.data(),
|
|
142
|
+
};
|
|
143
|
+
index_js_1.firestoreCache.set(cacheKey, result, 10_000);
|
|
144
|
+
return (0, index_js_1.formatSuccess)(result);
|
|
145
|
+
}
|
|
146
|
+
catch (err) {
|
|
147
|
+
(0, index_js_1.handleFirebaseError)(err, 'firestore', 'get_document');
|
|
148
|
+
}
|
|
149
|
+
},
|
|
150
|
+
},
|
|
151
|
+
// ── firestore_add_document ────────────────────────────
|
|
152
|
+
{
|
|
153
|
+
name: 'firestore_add_document',
|
|
154
|
+
description: 'Add a new document to a Firestore collection. Auto-generates a document ID.',
|
|
155
|
+
inputSchema: {
|
|
156
|
+
type: 'object',
|
|
157
|
+
properties: {
|
|
158
|
+
collection: { type: 'string', description: 'Collection path' },
|
|
159
|
+
data: { type: 'object', description: 'Document data' },
|
|
160
|
+
},
|
|
161
|
+
required: ['collection', 'data'],
|
|
162
|
+
},
|
|
163
|
+
handler: async (args) => {
|
|
164
|
+
try {
|
|
165
|
+
const collectionPath = (0, index_js_1.validateCollectionPath)(args['collection']);
|
|
166
|
+
const data = (0, index_js_1.sanitizeData)(args['data']);
|
|
167
|
+
const db = (0, firebase_js_1.getFirestore)();
|
|
168
|
+
const docRef = await db.collection(collectionPath).add({
|
|
169
|
+
...data,
|
|
170
|
+
_createdAt: firestore_1.FieldValue.serverTimestamp(),
|
|
171
|
+
_updatedAt: firestore_1.FieldValue.serverTimestamp(),
|
|
172
|
+
});
|
|
173
|
+
index_js_1.firestoreCache.invalidatePrefix(`doc:${collectionPath}`);
|
|
174
|
+
return (0, index_js_1.formatSuccess)({
|
|
175
|
+
id: docRef.id,
|
|
176
|
+
path: docRef.path,
|
|
177
|
+
message: `Document added successfully to "${collectionPath}".`,
|
|
178
|
+
});
|
|
179
|
+
}
|
|
180
|
+
catch (err) {
|
|
181
|
+
(0, index_js_1.handleFirebaseError)(err, 'firestore', 'add_document');
|
|
182
|
+
}
|
|
183
|
+
},
|
|
184
|
+
},
|
|
185
|
+
// ── firestore_set_document ────────────────────────────
|
|
186
|
+
{
|
|
187
|
+
name: 'firestore_set_document',
|
|
188
|
+
description: 'Create or overwrite a document at a specific path. Use merge: true to merge with existing data.',
|
|
189
|
+
inputSchema: {
|
|
190
|
+
type: 'object',
|
|
191
|
+
properties: {
|
|
192
|
+
path: { type: 'string', description: 'Full document path (e.g., "users/uid123")' },
|
|
193
|
+
data: { type: 'object', description: 'Document data' },
|
|
194
|
+
merge: { type: 'boolean', description: 'If true, merge with existing data instead of overwriting (default: false)' },
|
|
195
|
+
},
|
|
196
|
+
required: ['path', 'data'],
|
|
197
|
+
},
|
|
198
|
+
handler: async (args) => {
|
|
199
|
+
try {
|
|
200
|
+
const docPath = (0, index_js_1.validateDocumentPath)(args['path']);
|
|
201
|
+
const data = (0, index_js_1.sanitizeData)(args['data']);
|
|
202
|
+
const merge = args['merge'] || false;
|
|
203
|
+
const db = (0, firebase_js_1.getFirestore)();
|
|
204
|
+
await db.doc(docPath).set({
|
|
205
|
+
...data,
|
|
206
|
+
_updatedAt: firestore_1.FieldValue.serverTimestamp(),
|
|
207
|
+
}, { merge });
|
|
208
|
+
index_js_1.firestoreCache.delete(`doc:${docPath}`);
|
|
209
|
+
const parts = docPath.split('/');
|
|
210
|
+
parts.pop();
|
|
211
|
+
if (parts.length > 0) {
|
|
212
|
+
index_js_1.firestoreCache.invalidatePrefix(`doc:${parts.join('/')}`);
|
|
213
|
+
}
|
|
214
|
+
return (0, index_js_1.formatSuccess)({
|
|
215
|
+
path: docPath,
|
|
216
|
+
merged: merge,
|
|
217
|
+
message: `Document "${docPath}" ${merge ? 'merged' : 'set'} successfully.`,
|
|
218
|
+
});
|
|
219
|
+
}
|
|
220
|
+
catch (err) {
|
|
221
|
+
(0, index_js_1.handleFirebaseError)(err, 'firestore', 'set_document');
|
|
222
|
+
}
|
|
223
|
+
},
|
|
224
|
+
},
|
|
225
|
+
// ── firestore_update_document ─────────────────────────
|
|
226
|
+
{
|
|
227
|
+
name: 'firestore_update_document',
|
|
228
|
+
description: 'Update specific fields of an existing document. Only the provided fields are modified. ' +
|
|
229
|
+
'Use dot notation for nested fields (e.g., "address.city").',
|
|
230
|
+
inputSchema: {
|
|
231
|
+
type: 'object',
|
|
232
|
+
properties: {
|
|
233
|
+
path: { type: 'string', description: 'Full document path' },
|
|
234
|
+
data: { type: 'object', description: 'Fields to update (supports dot notation for nested fields)' },
|
|
235
|
+
},
|
|
236
|
+
required: ['path', 'data'],
|
|
237
|
+
},
|
|
238
|
+
handler: async (args) => {
|
|
239
|
+
try {
|
|
240
|
+
const docPath = (0, index_js_1.validateDocumentPath)(args['path']);
|
|
241
|
+
const data = (0, index_js_1.sanitizeData)(args['data']);
|
|
242
|
+
const db = (0, firebase_js_1.getFirestore)();
|
|
243
|
+
const docRef = db.doc(docPath);
|
|
244
|
+
const docSnap = await docRef.get();
|
|
245
|
+
if (!docSnap.exists) {
|
|
246
|
+
return (0, index_js_1.formatSuccess)({
|
|
247
|
+
success: false,
|
|
248
|
+
message: `Document "${docPath}" does not exist. Use firestore_set_document to create it.`,
|
|
249
|
+
});
|
|
250
|
+
}
|
|
251
|
+
await docRef.update({
|
|
252
|
+
...data,
|
|
253
|
+
_updatedAt: firestore_1.FieldValue.serverTimestamp(),
|
|
254
|
+
});
|
|
255
|
+
index_js_1.firestoreCache.delete(`doc:${docPath}`);
|
|
256
|
+
return (0, index_js_1.formatSuccess)({
|
|
257
|
+
path: docPath,
|
|
258
|
+
message: `Document "${docPath}" updated successfully.`,
|
|
259
|
+
});
|
|
260
|
+
}
|
|
261
|
+
catch (err) {
|
|
262
|
+
(0, index_js_1.handleFirebaseError)(err, 'firestore', 'update_document');
|
|
263
|
+
}
|
|
264
|
+
},
|
|
265
|
+
},
|
|
266
|
+
// ── firestore_delete_document ─────────────────────────
|
|
267
|
+
{
|
|
268
|
+
name: 'firestore_delete_document',
|
|
269
|
+
description: 'Delete a Firestore document. Optionally delete all subcollections recursively.',
|
|
270
|
+
inputSchema: {
|
|
271
|
+
type: 'object',
|
|
272
|
+
properties: {
|
|
273
|
+
path: { type: 'string', description: 'Full document path' },
|
|
274
|
+
recursive: { type: 'boolean', description: 'If true, delete all subcollections recursively (default: false)' },
|
|
275
|
+
},
|
|
276
|
+
required: ['path'],
|
|
277
|
+
},
|
|
278
|
+
handler: async (args) => {
|
|
279
|
+
try {
|
|
280
|
+
const docPath = (0, index_js_1.validateDocumentPath)(args['path']);
|
|
281
|
+
const recursive = args['recursive'] || false;
|
|
282
|
+
const db = (0, firebase_js_1.getFirestore)();
|
|
283
|
+
const docRef = db.doc(docPath);
|
|
284
|
+
if (recursive) {
|
|
285
|
+
await deleteDocumentRecursive(db, docRef);
|
|
286
|
+
}
|
|
287
|
+
else {
|
|
288
|
+
await docRef.delete();
|
|
289
|
+
}
|
|
290
|
+
index_js_1.firestoreCache.delete(`doc:${docPath}`);
|
|
291
|
+
const parts = docPath.split('/');
|
|
292
|
+
parts.pop();
|
|
293
|
+
if (parts.length > 0) {
|
|
294
|
+
index_js_1.firestoreCache.invalidatePrefix(`doc:${parts.join('/')}`);
|
|
295
|
+
}
|
|
296
|
+
return (0, index_js_1.formatSuccess)({
|
|
297
|
+
path: docPath,
|
|
298
|
+
recursive,
|
|
299
|
+
message: `Document "${docPath}" deleted${recursive ? ' recursively' : ''}.`,
|
|
300
|
+
});
|
|
301
|
+
}
|
|
302
|
+
catch (err) {
|
|
303
|
+
(0, index_js_1.handleFirebaseError)(err, 'firestore', 'delete_document');
|
|
304
|
+
}
|
|
305
|
+
},
|
|
306
|
+
},
|
|
307
|
+
// ── firestore_batch_write ─────────────────────────────
|
|
308
|
+
{
|
|
309
|
+
name: 'firestore_batch_write',
|
|
310
|
+
description: 'Execute multiple write operations atomically in a single batch. ' +
|
|
311
|
+
'Supports set, update, and delete operations. Maximum 500 operations per batch.',
|
|
312
|
+
inputSchema: {
|
|
313
|
+
type: 'object',
|
|
314
|
+
properties: {
|
|
315
|
+
operations: {
|
|
316
|
+
type: 'array',
|
|
317
|
+
items: {
|
|
318
|
+
type: 'object',
|
|
319
|
+
properties: {
|
|
320
|
+
type: { type: 'string', enum: ['set', 'update', 'delete'], description: 'Operation type' },
|
|
321
|
+
path: { type: 'string', description: 'Document path' },
|
|
322
|
+
data: { type: 'object', description: 'Data for set/update operations' },
|
|
323
|
+
merge: { type: 'boolean', description: 'For set operations, merge with existing data' },
|
|
324
|
+
},
|
|
325
|
+
required: ['type', 'path'],
|
|
326
|
+
},
|
|
327
|
+
description: 'Array of write operations (max 500)',
|
|
328
|
+
},
|
|
329
|
+
},
|
|
330
|
+
required: ['operations'],
|
|
331
|
+
},
|
|
332
|
+
handler: async (args) => {
|
|
333
|
+
try {
|
|
334
|
+
const operations = args['operations'];
|
|
335
|
+
if (!operations || operations.length === 0) {
|
|
336
|
+
throw new Error('Operations array cannot be empty.');
|
|
337
|
+
}
|
|
338
|
+
if (operations.length > 500) {
|
|
339
|
+
throw new Error('Batch write exceeds maximum of 500 operations. ' +
|
|
340
|
+
'Split into multiple batch writes of 500 or fewer operations.');
|
|
341
|
+
}
|
|
342
|
+
const db = (0, firebase_js_1.getFirestore)();
|
|
343
|
+
const batch = db.batch();
|
|
344
|
+
for (const op of operations) {
|
|
345
|
+
const docPath = (0, index_js_1.validateDocumentPath)(op.path);
|
|
346
|
+
const ref = db.doc(docPath);
|
|
347
|
+
switch (op.type) {
|
|
348
|
+
case 'set': {
|
|
349
|
+
const data = (0, index_js_1.sanitizeData)(op.data);
|
|
350
|
+
batch.set(ref, {
|
|
351
|
+
...data,
|
|
352
|
+
_updatedAt: firestore_1.FieldValue.serverTimestamp(),
|
|
353
|
+
}, { merge: op.merge ?? false });
|
|
354
|
+
break;
|
|
355
|
+
}
|
|
356
|
+
case 'update': {
|
|
357
|
+
const data = (0, index_js_1.sanitizeData)(op.data);
|
|
358
|
+
batch.update(ref, {
|
|
359
|
+
...data,
|
|
360
|
+
_updatedAt: firestore_1.FieldValue.serverTimestamp(),
|
|
361
|
+
});
|
|
362
|
+
break;
|
|
363
|
+
}
|
|
364
|
+
case 'delete': {
|
|
365
|
+
batch.delete(ref);
|
|
366
|
+
break;
|
|
367
|
+
}
|
|
368
|
+
}
|
|
369
|
+
}
|
|
370
|
+
await batch.commit();
|
|
371
|
+
for (const op of operations) {
|
|
372
|
+
index_js_1.firestoreCache.delete(`doc:${op.path}`);
|
|
373
|
+
}
|
|
374
|
+
return (0, index_js_1.formatSuccess)({
|
|
375
|
+
operationsCount: operations.length,
|
|
376
|
+
message: `Batch write completed successfully with ${operations.length} operations.`,
|
|
377
|
+
});
|
|
378
|
+
}
|
|
379
|
+
catch (err) {
|
|
380
|
+
(0, index_js_1.handleFirebaseError)(err, 'firestore', 'batch_write');
|
|
381
|
+
}
|
|
382
|
+
},
|
|
383
|
+
},
    // ── firestore_transaction ─────────────────────────────
    // Runs all reads, then all writes, inside a single Firestore transaction.
    // Firestore requires every read to happen before any write within the
    // transaction callback, which is why the two loops below are ordered.
    {
        name: 'firestore_transaction',
        description: 'Execute a transaction that reads documents and performs conditional writes atomically.',
        inputSchema: {
            type: 'object',
            properties: {
                readPaths: {
                    type: 'array',
                    items: { type: 'string' },
                    description: 'Document paths to read at the start of the transaction',
                },
                operations: {
                    type: 'array',
                    items: {
                        type: 'object',
                        properties: {
                            type: { type: 'string', enum: ['set', 'update', 'delete'] },
                            path: { type: 'string' },
                            data: { type: 'object' },
                            merge: { type: 'boolean' },
                        },
                        required: ['type', 'path'],
                    },
                    description: 'Write operations to execute',
                },
            },
            required: ['readPaths', 'operations'],
        },
        handler: async (args) => {
            try {
                const readPaths = args['readPaths'].map((p) => (0, index_js_1.validateDocumentPath)(p));
                const operations = args['operations'];
                const db = (0, firebase_js_1.getFirestore)();
                // NOTE: the callback may be retried by the SDK on contention;
                // it only touches transaction-scoped state, so retries are safe.
                const result = await db.runTransaction(async (transaction) => {
                    const reads = {};
                    // Phase 1: perform every read before any write (Firestore rule).
                    for (const path of readPaths) {
                        const docSnap = await transaction.get(db.doc(path));
                        reads[path] = docSnap.exists ? { exists: true, data: docSnap.data() } : { exists: false };
                    }
                    // Phase 2: queue the writes; they commit atomically at the end.
                    for (const op of operations) {
                        const docPath = (0, index_js_1.validateDocumentPath)(op.path);
                        const ref = db.doc(docPath);
                        switch (op.type) {
                            case 'set': {
                                const data = (0, index_js_1.sanitizeData)(op.data);
                                transaction.set(ref, {
                                    ...data,
                                    _updatedAt: firestore_1.FieldValue.serverTimestamp(),
                                }, { merge: op.merge ?? false });
                                break;
                            }
                            case 'update': {
                                const data = (0, index_js_1.sanitizeData)(op.data);
                                transaction.update(ref, {
                                    ...data,
                                    _updatedAt: firestore_1.FieldValue.serverTimestamp(),
                                });
                                break;
                            }
                            case 'delete': {
                                transaction.delete(ref);
                                break;
                            }
                        }
                    }
                    // The read snapshots become the transaction's return value.
                    return reads;
                });
                return (0, index_js_1.formatSuccess)({
                    readDocuments: result,
                    writesApplied: operations.length,
                    message: 'Transaction completed successfully.',
                });
            }
            catch (err) {
                (0, index_js_1.handleFirebaseError)(err, 'firestore', 'transaction');
            }
        },
    },
|
|
463
|
+
// ── firestore_list_collections ────────────────────────
|
|
464
|
+
{
|
|
465
|
+
name: 'firestore_list_collections',
|
|
466
|
+
description: 'List subcollections of a document, or root-level collections if no document path is provided.',
|
|
467
|
+
inputSchema: {
|
|
468
|
+
type: 'object',
|
|
469
|
+
properties: {
|
|
470
|
+
documentPath: { type: 'string', description: 'Optional document path to list its subcollections' },
|
|
471
|
+
},
|
|
472
|
+
},
|
|
473
|
+
handler: async (args) => {
|
|
474
|
+
try {
|
|
475
|
+
const db = (0, firebase_js_1.getFirestore)();
|
|
476
|
+
const docPath = args['documentPath'];
|
|
477
|
+
let collections;
|
|
478
|
+
if (docPath) {
|
|
479
|
+
(0, index_js_1.validateDocumentPath)(docPath);
|
|
480
|
+
collections = await db.doc(docPath).listCollections();
|
|
481
|
+
}
|
|
482
|
+
else {
|
|
483
|
+
collections = await db.listCollections();
|
|
484
|
+
}
|
|
485
|
+
const result = collections.map((col) => ({
|
|
486
|
+
id: col.id,
|
|
487
|
+
path: col.path,
|
|
488
|
+
}));
|
|
489
|
+
return (0, index_js_1.formatSuccess)(result);
|
|
490
|
+
}
|
|
491
|
+
catch (err) {
|
|
492
|
+
(0, index_js_1.handleFirebaseError)(err, 'firestore', 'list_collections');
|
|
493
|
+
}
|
|
494
|
+
},
|
|
495
|
+
},
|
|
496
|
+
// ── firestore_list_subcollections ─────────────────────
|
|
497
|
+
{
|
|
498
|
+
name: 'firestore_list_subcollections',
|
|
499
|
+
description: 'List all subcollections of a specific document.',
|
|
500
|
+
inputSchema: {
|
|
501
|
+
type: 'object',
|
|
502
|
+
properties: {
|
|
503
|
+
path: { type: 'string', description: 'Document path to inspect' },
|
|
504
|
+
},
|
|
505
|
+
required: ['path'],
|
|
506
|
+
},
|
|
507
|
+
handler: async (args) => {
|
|
508
|
+
try {
|
|
509
|
+
const docPath = (0, index_js_1.validateDocumentPath)(args['path']);
|
|
510
|
+
const db = (0, firebase_js_1.getFirestore)();
|
|
511
|
+
const collections = await db.doc(docPath).listCollections();
|
|
512
|
+
const result = collections.map((col) => ({
|
|
513
|
+
id: col.id,
|
|
514
|
+
path: col.path,
|
|
515
|
+
}));
|
|
516
|
+
return (0, index_js_1.formatSuccess)({
|
|
517
|
+
documentPath: docPath,
|
|
518
|
+
subcollections: result,
|
|
519
|
+
});
|
|
520
|
+
}
|
|
521
|
+
catch (err) {
|
|
522
|
+
(0, index_js_1.handleFirebaseError)(err, 'firestore', 'list_subcollections');
|
|
523
|
+
}
|
|
524
|
+
},
|
|
525
|
+
},
|
|
526
|
+
// ── firestore_aggregate_query ─────────────────────────
|
|
527
|
+
{
|
|
528
|
+
name: 'firestore_aggregate_query',
|
|
529
|
+
description: 'Run aggregation queries (count, sum, average) on Firestore collections with optional filters.',
|
|
530
|
+
inputSchema: {
|
|
531
|
+
type: 'object',
|
|
532
|
+
properties: {
|
|
533
|
+
collection: { type: 'string', description: 'Collection path' },
|
|
534
|
+
aggregations: {
|
|
535
|
+
type: 'array',
|
|
536
|
+
items: {
|
|
537
|
+
type: 'object',
|
|
538
|
+
properties: {
|
|
539
|
+
type: { type: 'string', enum: ['count', 'sum', 'avg'], description: 'Aggregation type' },
|
|
540
|
+
field: { type: 'string', description: 'Field to aggregate (not needed for count)' },
|
|
541
|
+
alias: { type: 'string', description: 'Alias for the result' },
|
|
542
|
+
},
|
|
543
|
+
required: ['type'],
|
|
544
|
+
},
|
|
545
|
+
description: 'Aggregation operations to perform',
|
|
546
|
+
},
|
|
547
|
+
where: {
|
|
548
|
+
type: 'array',
|
|
549
|
+
items: {
|
|
550
|
+
type: 'object',
|
|
551
|
+
properties: {
|
|
552
|
+
field: { type: 'string' },
|
|
553
|
+
operator: { type: 'string' },
|
|
554
|
+
value: {},
|
|
555
|
+
},
|
|
556
|
+
required: ['field', 'operator', 'value'],
|
|
557
|
+
},
|
|
558
|
+
description: 'Optional where clauses to filter before aggregating',
|
|
559
|
+
},
|
|
560
|
+
},
|
|
561
|
+
required: ['collection', 'aggregations'],
|
|
562
|
+
},
|
|
563
|
+
handler: async (args) => {
|
|
564
|
+
try {
|
|
565
|
+
const collectionPath = (0, index_js_1.validateCollectionPath)(args['collection']);
|
|
566
|
+
const aggregations = args['aggregations'];
|
|
567
|
+
const whereClauses = args['where'] || [];
|
|
568
|
+
const db = (0, firebase_js_1.getFirestore)();
|
|
569
|
+
let query = db.collection(collectionPath);
|
|
570
|
+
for (const w of whereClauses) {
|
|
571
|
+
const field = (0, index_js_1.validateWhereField)(w.field);
|
|
572
|
+
const op = (0, index_js_1.validateOperator)(w.operator);
|
|
573
|
+
query = query.where(field, op, w.value);
|
|
574
|
+
}
|
|
575
|
+
const aggregateSpec = {};
|
|
576
|
+
for (const agg of aggregations) {
|
|
577
|
+
const alias = agg.alias || `${agg.type}_${agg.field || 'all'}`;
|
|
578
|
+
switch (agg.type) {
|
|
579
|
+
case 'count':
|
|
580
|
+
aggregateSpec[alias] = firestore_1.AggregateField.count();
|
|
581
|
+
break;
|
|
582
|
+
case 'sum':
|
|
583
|
+
if (!agg.field)
|
|
584
|
+
throw new Error('Sum aggregation requires a "field" parameter.');
|
|
585
|
+
aggregateSpec[alias] = firestore_1.AggregateField.sum(agg.field);
|
|
586
|
+
break;
|
|
587
|
+
case 'avg':
|
|
588
|
+
if (!agg.field)
|
|
589
|
+
throw new Error('Average aggregation requires a "field" parameter.');
|
|
590
|
+
aggregateSpec[alias] = firestore_1.AggregateField.average(agg.field);
|
|
591
|
+
break;
|
|
592
|
+
}
|
|
593
|
+
}
|
|
594
|
+
const snapshot = await query.aggregate(aggregateSpec).get();
|
|
595
|
+
const results = {};
|
|
596
|
+
for (const agg of aggregations) {
|
|
597
|
+
const alias = agg.alias || `${agg.type}_${agg.field || 'all'}`;
|
|
598
|
+
results[alias] = snapshot.data()[alias];
|
|
599
|
+
}
|
|
600
|
+
return (0, index_js_1.formatSuccess)(results);
|
|
601
|
+
}
|
|
602
|
+
catch (err) {
|
|
603
|
+
(0, index_js_1.handleFirebaseError)(err, 'firestore', 'aggregate_query');
|
|
604
|
+
}
|
|
605
|
+
},
|
|
606
|
+
},
    // ── firestore_listen_changes ──────────────────────────
    // Attaches a snapshot listener for a bounded window, buffers everything it
    // observes, then detaches and returns the buffer. Listener errors are
    // recorded as change entries of type 'error' rather than failing the call.
    // NOTE(review): this blocks the request for up to `durationMs`.
    {
        name: 'firestore_listen_changes',
        description: 'Listen for real-time changes on a Firestore collection or document for a short duration.',
        inputSchema: {
            type: 'object',
            properties: {
                path: { type: 'string', description: 'Collection or document path to watch' },
                durationMs: { type: 'number', description: 'Duration to listen in milliseconds (1000-30000, default 5000)' },
            },
            required: ['path'],
        },
        handler: async (args) => {
            try {
                const path = args['path'].trim();
                // Clamp the listening window to 1s-30s, defaulting to 5s.
                const durationMs = Math.min(Math.max(args['durationMs'] || 5000, 1000), 30000);
                const db = (0, firebase_js_1.getFirestore)();
                const segments = path.split('/');
                const changes = [];
                let unsubscribe;
                const startTime = Date.now();
                // Resolve only after the timeout fires; the listener runs meanwhile.
                // A validation throw inside the executor rejects the promise and is
                // handled by the outer catch.
                await new Promise((resolve) => {
                    // An even segment count is a document path; odd is a collection.
                    if (segments.length % 2 === 0) {
                        (0, index_js_1.validateDocumentPath)(path);
                        unsubscribe = db.doc(path).onSnapshot((snap) => {
                            changes.push({
                                type: snap.exists ? 'modified' : 'removed',
                                path: snap.ref.path,
                                data: snap.data(),
                                duration: Date.now() - startTime,
                            });
                        }, (err) => {
                            changes.push({
                                type: 'error',
                                path,
                                data: err.message,
                                duration: Date.now() - startTime,
                            });
                        });
                    }
                    else {
                        (0, index_js_1.validateCollectionPath)(path);
                        unsubscribe = db.collection(path).onSnapshot((snap) => {
                            snap.docChanges().forEach((change) => {
                                changes.push({
                                    type: change.type,
                                    path: change.doc.ref.path,
                                    data: change.doc.data(),
                                    duration: Date.now() - startTime,
                                });
                            });
                        }, (err) => {
                            changes.push({
                                type: 'error',
                                path,
                                data: err.message,
                                duration: Date.now() - startTime,
                            });
                        });
                    }
                    // Always detach the listener when the window elapses.
                    setTimeout(() => {
                        if (unsubscribe)
                            unsubscribe();
                        resolve();
                    }, durationMs);
                });
                return (0, index_js_1.formatSuccess)({
                    path,
                    listenedForMs: durationMs,
                    changesDetected: changes.length,
                    changes,
                });
            }
            catch (err) {
                (0, index_js_1.handleFirebaseError)(err, 'firestore', 'listen_changes');
            }
        },
    },
|
|
685
|
+
// ── firestore_infer_schema ────────────────────────────
|
|
686
|
+
{
|
|
687
|
+
name: 'firestore_infer_schema',
|
|
688
|
+
description: 'Infer the schema of a Firestore collection by sampling documents. Returns field names, types, and occurrence counts.',
|
|
689
|
+
inputSchema: {
|
|
690
|
+
type: 'object',
|
|
691
|
+
properties: {
|
|
692
|
+
collection: { type: 'string', description: 'Collection path to analyze' },
|
|
693
|
+
sampleSize: { type: 'number', description: 'Number of documents to sample (1-100, default 20)' },
|
|
694
|
+
},
|
|
695
|
+
required: ['collection'],
|
|
696
|
+
},
|
|
697
|
+
handler: async (args) => {
|
|
698
|
+
try {
|
|
699
|
+
const collectionPath = (0, index_js_1.validateCollectionPath)(args['collection']);
|
|
700
|
+
const sampleSize = Math.min(Math.max(args['sampleSize'] || 20, 1), 100);
|
|
701
|
+
const cacheKey = `schema:${collectionPath}`;
|
|
702
|
+
const cached = index_js_1.schemaCache.get(cacheKey);
|
|
703
|
+
if (cached) {
|
|
704
|
+
return (0, index_js_1.formatSuccess)(cached);
|
|
705
|
+
}
|
|
706
|
+
const db = (0, firebase_js_1.getFirestore)();
|
|
707
|
+
const snapshot = await db.collection(collectionPath).limit(sampleSize).get();
|
|
708
|
+
if (snapshot.empty) {
|
|
709
|
+
return (0, index_js_1.formatSuccess)({
|
|
710
|
+
collection: collectionPath,
|
|
711
|
+
documentCount: 0,
|
|
712
|
+
message: 'Collection is empty. No schema to infer.',
|
|
713
|
+
});
|
|
714
|
+
}
|
|
715
|
+
const fieldStats = {};
|
|
716
|
+
for (const doc of snapshot.docs) {
|
|
717
|
+
const data = doc.data();
|
|
718
|
+
inferFields(data, '', fieldStats, 0);
|
|
719
|
+
}
|
|
720
|
+
const totalDocs = snapshot.size;
|
|
721
|
+
const schema = {};
|
|
722
|
+
for (const [field, stats] of Object.entries(fieldStats)) {
|
|
723
|
+
const dominantType = Object.entries(stats.types)
|
|
724
|
+
.sort((a, b) => b[1] - a[1])[0][0];
|
|
725
|
+
schema[field] = {
|
|
726
|
+
type: dominantType,
|
|
727
|
+
presence: `${((stats.count / totalDocs) * 100).toFixed(1)}%`,
|
|
728
|
+
nullable: stats.nullable,
|
|
729
|
+
sampleValues: stats.sampleValues.slice(0, 3),
|
|
730
|
+
};
|
|
731
|
+
}
|
|
732
|
+
const result = {
|
|
733
|
+
collection: collectionPath,
|
|
734
|
+
documentsSampled: totalDocs,
|
|
735
|
+
fields: schema,
|
|
736
|
+
};
|
|
737
|
+
index_js_1.schemaCache.set(cacheKey, result, 10 * 60 * 1000);
|
|
738
|
+
return (0, index_js_1.formatSuccess)(result);
|
|
739
|
+
}
|
|
740
|
+
catch (err) {
|
|
741
|
+
(0, index_js_1.handleFirebaseError)(err, 'firestore', 'infer_schema');
|
|
742
|
+
}
|
|
743
|
+
},
|
|
744
|
+
},
|
|
745
|
+
];
// ── Helpers ──────────────────────────────────────────────
/**
 * Recursively delete a document: depth-first removal of every document in
 * every subcollection, then the document itself.
 *
 * Fix: page through each subcollection in batches of 500 until it is empty.
 * The previous version fetched a single `limit(500)` batch per subcollection,
 * silently leaving any documents beyond the first 500 behind.
 *
 * @param {FirebaseFirestore.Firestore} db - Firestore instance (kept for signature compatibility).
 * @param {FirebaseFirestore.DocumentReference} docRef - Document to delete.
 */
async function deleteDocumentRecursive(db, docRef) {
    const collections = await docRef.listCollections();
    for (const col of collections) {
        // Repeat until the subcollection has no documents left.
        for (;;) {
            const snapshot = await col.limit(500).get();
            if (snapshot.docs.length === 0) {
                break;
            }
            for (const doc of snapshot.docs) {
                await deleteDocumentRecursive(db, doc.ref);
            }
        }
    }
    await docRef.delete();
}
/**
 * Walk a document's fields depth-first (max depth 10) and accumulate, per
 * dotted field path, the observed types, an occurrence count, nullability,
 * and up to three sample values. Mutates `fieldStats` in place.
 * Keys beginning with "_" (internal bookkeeping fields) are skipped.
 *
 * @param {object} data - Plain map of field values (non-maps are ignored).
 * @param {string} prefix - Dotted path prefix of the enclosing map ('' at root).
 * @param {object} fieldStats - Accumulator keyed by dotted field path.
 * @param {number} depth - Current nesting depth; recursion stops past 10.
 */
function inferFields(data, prefix, fieldStats, depth) {
    // Guards: excessive nesting, nullish input, or anything that is not a plain map.
    if (depth > 10 || data === null || data === undefined)
        return;
    if (typeof data !== 'object' || Array.isArray(data))
        return;
    const bump = (stats, typeName) => {
        stats.types[typeName] = (stats.types[typeName] ?? 0) + 1;
    };
    for (const [key, value] of Object.entries(data)) {
        if (key.startsWith('_'))
            continue;
        const fieldPath = prefix ? `${prefix}.${key}` : key;
        const stats = (fieldStats[fieldPath] ??= { types: {}, count: 0, nullable: false, sampleValues: [] });
        stats.count += 1;
        const wantSample = () => stats.sampleValues.length < 3;
        if (value === null || value === undefined) {
            stats.nullable = true;
            bump(stats, 'null');
        }
        else if (Array.isArray(value)) {
            bump(stats, 'array');
            // Keep only the first two elements as the sample.
            if (wantSample())
                stats.sampleValues.push(value.slice(0, 2));
        }
        else if (value instanceof Date) {
            bump(stats, 'timestamp');
            if (wantSample())
                stats.sampleValues.push(value.toISOString());
        }
        else if (typeof value === 'object') {
            // Nested map: count it, then recurse into its fields.
            bump(stats, 'map');
            inferFields(value, fieldPath, fieldStats, depth + 1);
        }
        else {
            bump(stats, typeof value);
            if (wantSample())
                stats.sampleValues.push(value);
        }
    }
}
//# sourceMappingURL=firestore.js.map