@theihtisham/mcp-server-firebase 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (56):
  1. package/LICENSE +21 -0
  2. package/README.md +362 -0
  3. package/dist/index.d.ts +3 -0
  4. package/dist/index.js +79 -0
  5. package/dist/services/firebase.d.ts +14 -0
  6. package/dist/services/firebase.js +163 -0
  7. package/dist/tools/auth.d.ts +3 -0
  8. package/dist/tools/auth.js +346 -0
  9. package/dist/tools/firestore.d.ts +3 -0
  10. package/dist/tools/firestore.js +802 -0
  11. package/dist/tools/functions.d.ts +3 -0
  12. package/dist/tools/functions.js +168 -0
  13. package/dist/tools/index.d.ts +10 -0
  14. package/dist/tools/index.js +30 -0
  15. package/dist/tools/messaging.d.ts +3 -0
  16. package/dist/tools/messaging.js +296 -0
  17. package/dist/tools/realtime-db.d.ts +4 -0
  18. package/dist/tools/realtime-db.js +271 -0
  19. package/dist/tools/storage.d.ts +3 -0
  20. package/dist/tools/storage.js +279 -0
  21. package/dist/tools/types.d.ts +11 -0
  22. package/dist/tools/types.js +3 -0
  23. package/dist/utils/cache.d.ts +16 -0
  24. package/dist/utils/cache.js +75 -0
  25. package/dist/utils/errors.d.ts +15 -0
  26. package/dist/utils/errors.js +94 -0
  27. package/dist/utils/index.d.ts +5 -0
  28. package/dist/utils/index.js +37 -0
  29. package/dist/utils/pagination.d.ts +28 -0
  30. package/dist/utils/pagination.js +75 -0
  31. package/dist/utils/validation.d.ts +22 -0
  32. package/dist/utils/validation.js +172 -0
  33. package/package.json +53 -0
  34. package/src/index.ts +94 -0
  35. package/src/services/firebase.ts +140 -0
  36. package/src/tools/auth.ts +375 -0
  37. package/src/tools/firestore.ts +931 -0
  38. package/src/tools/functions.ts +189 -0
  39. package/src/tools/index.ts +24 -0
  40. package/src/tools/messaging.ts +324 -0
  41. package/src/tools/realtime-db.ts +307 -0
  42. package/src/tools/storage.ts +314 -0
  43. package/src/tools/types.ts +10 -0
  44. package/src/utils/cache.ts +82 -0
  45. package/src/utils/errors.ts +110 -0
  46. package/src/utils/index.ts +4 -0
  47. package/src/utils/pagination.ts +105 -0
  48. package/src/utils/validation.ts +212 -0
  49. package/tests/cache.test.ts +139 -0
  50. package/tests/errors.test.ts +132 -0
  51. package/tests/firebase-service.test.ts +46 -0
  52. package/tests/pagination.test.ts +26 -0
  53. package/tests/tools.test.ts +226 -0
  54. package/tests/validation.test.ts +216 -0
  55. package/tsconfig.json +26 -0
  56. package/vitest.config.ts +15 -0
@@ -0,0 +1,931 @@
1
+ import { getFirestore } from '../services/firebase.js';
2
+ import {
3
+ validateCollectionPath,
4
+ validateDocumentPath,
5
+ validateWhereField,
6
+ validateOperator,
7
+ sanitizeData,
8
+ handleFirebaseError,
9
+ formatSuccess,
10
+ formatListResult,
11
+ firestoreCache,
12
+ schemaCache,
13
+ } from '../utils/index.js';
14
+ import type { ToolDefinition } from './types.js';
15
+ import {
16
+ Firestore,
17
+ Query,
18
+ WhereFilterOp,
19
+ DocumentSnapshot,
20
+ FieldValue,
21
+ AggregateField,
22
+ AggregateSpec,
23
+ } from 'firebase-admin/firestore';
24
+
25
+ // ============================================================
26
+ // FIRESTORE TOOLS
27
+ // ============================================================
28
+
29
+ export const firestoreTools: ToolDefinition[] = [
30
+ // ── firestore_query ──────────────────────────────────
31
+ {
32
+ name: 'firestore_query',
33
+ description:
34
+ 'Query Firestore collections with filtering, ordering, and pagination. ' +
35
+ 'Supports where clauses, order by, limit, and cursor-based pagination.',
36
+ inputSchema: {
37
+ type: 'object' as const,
38
+ properties: {
39
+ collection: {
40
+ type: 'string',
41
+ description: 'Collection path (e.g., "users" or "users/uid1/orders")',
42
+ },
43
+ where: {
44
+ type: 'array',
45
+ items: {
46
+ type: 'object',
47
+ properties: {
48
+ field: { type: 'string' },
49
+ operator: { type: 'string', enum: ['==', '!=', '<', '<=', '>', '>=', 'array-contains', 'array-contains-any', 'in', 'not-in'] },
50
+ value: { description: 'Value to compare against' },
51
+ },
52
+ required: ['field', 'operator', 'value'],
53
+ },
54
+ description: 'Array of where clauses for filtering',
55
+ },
56
+ orderBy: {
57
+ type: 'array',
58
+ items: {
59
+ type: 'object',
60
+ properties: {
61
+ field: { type: 'string' },
62
+ direction: { type: 'string', enum: ['asc', 'desc'] },
63
+ },
64
+ required: ['field'],
65
+ },
66
+ description: 'Array of order-by specifications',
67
+ },
68
+ limit: { type: 'number', description: 'Maximum results (1-10000, default 100)' },
69
+ pageToken: { type: 'string', description: 'Pagination token from previous query result' },
70
+ select: {
71
+ type: 'array',
72
+ items: { type: 'string' },
73
+ description: 'Fields to include in the result (projection)',
74
+ },
75
+ },
76
+ required: ['collection'],
77
+ },
78
+ handler: async (args: Record<string, unknown>) => {
79
+ try {
80
+ const collectionPath = validateCollectionPath(args['collection'] as string);
81
+ const limit = Math.min((args['limit'] as number) || 100, 10000);
82
+ const pageToken = args['pageToken'] as string | undefined;
83
+ const whereClauses = (args['where'] as Array<{ field: string; operator: string; value: unknown }>) || [];
84
+ const orderByClauses = (args['orderBy'] as Array<{ field: string; direction?: string }>) || [];
85
+ const selectFields = args['select'] as string[] | undefined;
86
+
87
+ const db = getFirestore();
88
+ let query: Query = db.collection(collectionPath);
89
+
90
+ // Apply where clauses
91
+ for (const w of whereClauses) {
92
+ const field = validateWhereField(w.field);
93
+ const op = validateOperator(w.operator);
94
+ query = query.where(field, op as WhereFilterOp, w.value);
95
+ }
96
+
97
+ // Apply orderBy
98
+ for (const o of orderByClauses) {
99
+ query = query.orderBy(o.field, (o.direction || 'asc') as 'asc' | 'desc');
100
+ }
101
+
102
+ // Apply pagination
103
+ if (pageToken) {
104
+ const decodedPath = Buffer.from(pageToken, 'base64').toString('utf-8');
105
+ const lastDocSnap = await db.doc(decodedPath).get();
106
+ if (lastDocSnap.exists) {
107
+ query = query.startAfter(lastDocSnap);
108
+ }
109
+ }
110
+
111
+ query = query.limit(limit);
112
+
113
+ // Apply field selection
114
+ if (selectFields && selectFields.length > 0) {
115
+ query = query.select(...selectFields);
116
+ }
117
+
118
+ const snapshot = await query.get();
119
+ const docs = snapshot.docs.map((doc: DocumentSnapshot) => ({
120
+ id: doc.id,
121
+ path: doc.ref.path,
122
+ data: doc.data(),
123
+ }));
124
+
125
+ let nextPageToken: string | undefined;
126
+ if (snapshot.size === limit && snapshot.docs.length > 0) {
127
+ const lastDoc = snapshot.docs[snapshot.docs.length - 1]!;
128
+ nextPageToken = Buffer.from(lastDoc.ref.path).toString('base64');
129
+ }
130
+
131
+ return formatListResult(docs, nextPageToken);
132
+ } catch (err) {
133
+ handleFirebaseError(err, 'firestore', 'query');
134
+ }
135
+ },
136
+ },
137
+
138
+ // ── firestore_get_document ────────────────────────────
139
+ {
140
+ name: 'firestore_get_document',
141
+ description: 'Get a single Firestore document by its full path.',
142
+ inputSchema: {
143
+ type: 'object' as const,
144
+ properties: {
145
+ path: { type: 'string', description: 'Full document path (e.g., "users/uid123")' },
146
+ },
147
+ required: ['path'],
148
+ },
149
+ handler: async (args: Record<string, unknown>) => {
150
+ try {
151
+ const docPath = validateDocumentPath(args['path'] as string);
152
+ const cacheKey = `doc:${docPath}`;
153
+
154
+ const cached = firestoreCache.get(cacheKey);
155
+ if (cached) {
156
+ return formatSuccess(cached);
157
+ }
158
+
159
+ const db = getFirestore();
160
+ const docSnap = await db.doc(docPath).get();
161
+
162
+ if (!docSnap.exists) {
163
+ return formatSuccess({
164
+ exists: false,
165
+ path: docPath,
166
+ message: `Document "${docPath}" does not exist.`,
167
+ });
168
+ }
169
+
170
+ const result = {
171
+ exists: true,
172
+ id: docSnap.id,
173
+ path: docSnap.ref.path,
174
+ data: docSnap.data(),
175
+ };
176
+
177
+ firestoreCache.set(cacheKey, result, 10_000);
178
+ return formatSuccess(result);
179
+ } catch (err) {
180
+ handleFirebaseError(err, 'firestore', 'get_document');
181
+ }
182
+ },
183
+ },
184
+
185
+ // ── firestore_add_document ────────────────────────────
186
+ {
187
+ name: 'firestore_add_document',
188
+ description:
189
+ 'Add a new document to a Firestore collection. Auto-generates a document ID.',
190
+ inputSchema: {
191
+ type: 'object' as const,
192
+ properties: {
193
+ collection: { type: 'string', description: 'Collection path' },
194
+ data: { type: 'object', description: 'Document data' },
195
+ },
196
+ required: ['collection', 'data'],
197
+ },
198
+ handler: async (args: Record<string, unknown>) => {
199
+ try {
200
+ const collectionPath = validateCollectionPath(args['collection'] as string);
201
+ const data = sanitizeData(args['data']);
202
+
203
+ const db = getFirestore();
204
+ const docRef = await db.collection(collectionPath).add({
205
+ ...(data as Record<string, unknown>),
206
+ _createdAt: FieldValue.serverTimestamp(),
207
+ _updatedAt: FieldValue.serverTimestamp(),
208
+ });
209
+
210
+ firestoreCache.invalidatePrefix(`doc:${collectionPath}`);
211
+
212
+ return formatSuccess({
213
+ id: docRef.id,
214
+ path: docRef.path,
215
+ message: `Document added successfully to "${collectionPath}".`,
216
+ });
217
+ } catch (err) {
218
+ handleFirebaseError(err, 'firestore', 'add_document');
219
+ }
220
+ },
221
+ },
222
+
223
+ // ── firestore_set_document ────────────────────────────
224
+ {
225
+ name: 'firestore_set_document',
226
+ description:
227
+ 'Create or overwrite a document at a specific path. Use merge: true to merge with existing data.',
228
+ inputSchema: {
229
+ type: 'object' as const,
230
+ properties: {
231
+ path: { type: 'string', description: 'Full document path (e.g., "users/uid123")' },
232
+ data: { type: 'object', description: 'Document data' },
233
+ merge: { type: 'boolean', description: 'If true, merge with existing data instead of overwriting (default: false)' },
234
+ },
235
+ required: ['path', 'data'],
236
+ },
237
+ handler: async (args: Record<string, unknown>) => {
238
+ try {
239
+ const docPath = validateDocumentPath(args['path'] as string);
240
+ const data = sanitizeData(args['data']);
241
+ const merge = (args['merge'] as boolean) || false;
242
+
243
+ const db = getFirestore();
244
+ await db.doc(docPath).set(
245
+ {
246
+ ...(data as Record<string, unknown>),
247
+ _updatedAt: FieldValue.serverTimestamp(),
248
+ },
249
+ { merge }
250
+ );
251
+
252
+ firestoreCache.delete(`doc:${docPath}`);
253
+ const parts = docPath.split('/');
254
+ parts.pop();
255
+ if (parts.length > 0) {
256
+ firestoreCache.invalidatePrefix(`doc:${parts.join('/')}`);
257
+ }
258
+
259
+ return formatSuccess({
260
+ path: docPath,
261
+ merged: merge,
262
+ message: `Document "${docPath}" ${merge ? 'merged' : 'set'} successfully.`,
263
+ });
264
+ } catch (err) {
265
+ handleFirebaseError(err, 'firestore', 'set_document');
266
+ }
267
+ },
268
+ },
269
+
270
+ // ── firestore_update_document ─────────────────────────
271
+ {
272
+ name: 'firestore_update_document',
273
+ description:
274
+ 'Update specific fields of an existing document. Only the provided fields are modified. ' +
275
+ 'Use dot notation for nested fields (e.g., "address.city").',
276
+ inputSchema: {
277
+ type: 'object' as const,
278
+ properties: {
279
+ path: { type: 'string', description: 'Full document path' },
280
+ data: { type: 'object', description: 'Fields to update (supports dot notation for nested fields)' },
281
+ },
282
+ required: ['path', 'data'],
283
+ },
284
+ handler: async (args: Record<string, unknown>) => {
285
+ try {
286
+ const docPath = validateDocumentPath(args['path'] as string);
287
+ const data = sanitizeData(args['data']);
288
+
289
+ const db = getFirestore();
290
+ const docRef = db.doc(docPath);
291
+
292
+ const docSnap = await docRef.get();
293
+ if (!docSnap.exists) {
294
+ return formatSuccess({
295
+ success: false,
296
+ message: `Document "${docPath}" does not exist. Use firestore_set_document to create it.`,
297
+ });
298
+ }
299
+
300
+ await docRef.update({
301
+ ...(data as Record<string, unknown>),
302
+ _updatedAt: FieldValue.serverTimestamp(),
303
+ });
304
+
305
+ firestoreCache.delete(`doc:${docPath}`);
306
+
307
+ return formatSuccess({
308
+ path: docPath,
309
+ message: `Document "${docPath}" updated successfully.`,
310
+ });
311
+ } catch (err) {
312
+ handleFirebaseError(err, 'firestore', 'update_document');
313
+ }
314
+ },
315
+ },
316
+
317
+ // ── firestore_delete_document ─────────────────────────
318
+ {
319
+ name: 'firestore_delete_document',
320
+ description:
321
+ 'Delete a Firestore document. Optionally delete all subcollections recursively.',
322
+ inputSchema: {
323
+ type: 'object' as const,
324
+ properties: {
325
+ path: { type: 'string', description: 'Full document path' },
326
+ recursive: { type: 'boolean', description: 'If true, delete all subcollections recursively (default: false)' },
327
+ },
328
+ required: ['path'],
329
+ },
330
+ handler: async (args: Record<string, unknown>) => {
331
+ try {
332
+ const docPath = validateDocumentPath(args['path'] as string);
333
+ const recursive = (args['recursive'] as boolean) || false;
334
+
335
+ const db = getFirestore();
336
+ const docRef = db.doc(docPath);
337
+
338
+ if (recursive) {
339
+ await deleteDocumentRecursive(db, docRef);
340
+ } else {
341
+ await docRef.delete();
342
+ }
343
+
344
+ firestoreCache.delete(`doc:${docPath}`);
345
+ const parts = docPath.split('/');
346
+ parts.pop();
347
+ if (parts.length > 0) {
348
+ firestoreCache.invalidatePrefix(`doc:${parts.join('/')}`);
349
+ }
350
+
351
+ return formatSuccess({
352
+ path: docPath,
353
+ recursive,
354
+ message: `Document "${docPath}" deleted${recursive ? ' recursively' : ''}.`,
355
+ });
356
+ } catch (err) {
357
+ handleFirebaseError(err, 'firestore', 'delete_document');
358
+ }
359
+ },
360
+ },
361
+
362
+ // ── firestore_batch_write ─────────────────────────────
363
+ {
364
+ name: 'firestore_batch_write',
365
+ description:
366
+ 'Execute multiple write operations atomically in a single batch. ' +
367
+ 'Supports set, update, and delete operations. Maximum 500 operations per batch.',
368
+ inputSchema: {
369
+ type: 'object' as const,
370
+ properties: {
371
+ operations: {
372
+ type: 'array',
373
+ items: {
374
+ type: 'object',
375
+ properties: {
376
+ type: { type: 'string', enum: ['set', 'update', 'delete'], description: 'Operation type' },
377
+ path: { type: 'string', description: 'Document path' },
378
+ data: { type: 'object', description: 'Data for set/update operations' },
379
+ merge: { type: 'boolean', description: 'For set operations, merge with existing data' },
380
+ },
381
+ required: ['type', 'path'],
382
+ },
383
+ description: 'Array of write operations (max 500)',
384
+ },
385
+ },
386
+ required: ['operations'],
387
+ },
388
+ handler: async (args: Record<string, unknown>) => {
389
+ try {
390
+ const operations = args['operations'] as Array<{
391
+ type: 'set' | 'update' | 'delete';
392
+ path: string;
393
+ data?: Record<string, unknown>;
394
+ merge?: boolean;
395
+ }>;
396
+
397
+ if (!operations || operations.length === 0) {
398
+ throw new Error('Operations array cannot be empty.');
399
+ }
400
+ if (operations.length > 500) {
401
+ throw new Error(
402
+ 'Batch write exceeds maximum of 500 operations. ' +
403
+ 'Split into multiple batch writes of 500 or fewer operations.'
404
+ );
405
+ }
406
+
407
+ const db = getFirestore();
408
+ const batch = db.batch();
409
+
410
+ for (const op of operations) {
411
+ const docPath = validateDocumentPath(op.path);
412
+ const ref = db.doc(docPath);
413
+
414
+ switch (op.type) {
415
+ case 'set': {
416
+ const data = sanitizeData(op.data) as Record<string, unknown>;
417
+ batch.set(ref, {
418
+ ...data,
419
+ _updatedAt: FieldValue.serverTimestamp(),
420
+ }, { merge: op.merge ?? false });
421
+ break;
422
+ }
423
+ case 'update': {
424
+ const data = sanitizeData(op.data) as Record<string, unknown>;
425
+ batch.update(ref, {
426
+ ...data,
427
+ _updatedAt: FieldValue.serverTimestamp(),
428
+ });
429
+ break;
430
+ }
431
+ case 'delete': {
432
+ batch.delete(ref);
433
+ break;
434
+ }
435
+ }
436
+ }
437
+
438
+ await batch.commit();
439
+
440
+ for (const op of operations) {
441
+ firestoreCache.delete(`doc:${op.path}`);
442
+ }
443
+
444
+ return formatSuccess({
445
+ operationsCount: operations.length,
446
+ message: `Batch write completed successfully with ${operations.length} operations.`,
447
+ });
448
+ } catch (err) {
449
+ handleFirebaseError(err, 'firestore', 'batch_write');
450
+ }
451
+ },
452
+ },
453
+
454
+ // ── firestore_transaction ─────────────────────────────
455
+ {
456
+ name: 'firestore_transaction',
457
+ description:
458
+ 'Execute a transaction that reads documents and performs conditional writes atomically.',
459
+ inputSchema: {
460
+ type: 'object' as const,
461
+ properties: {
462
+ readPaths: {
463
+ type: 'array',
464
+ items: { type: 'string' },
465
+ description: 'Document paths to read at the start of the transaction',
466
+ },
467
+ operations: {
468
+ type: 'array',
469
+ items: {
470
+ type: 'object',
471
+ properties: {
472
+ type: { type: 'string', enum: ['set', 'update', 'delete'] },
473
+ path: { type: 'string' },
474
+ data: { type: 'object' },
475
+ merge: { type: 'boolean' },
476
+ },
477
+ required: ['type', 'path'],
478
+ },
479
+ description: 'Write operations to execute',
480
+ },
481
+ },
482
+ required: ['readPaths', 'operations'],
483
+ },
484
+ handler: async (args: Record<string, unknown>) => {
485
+ try {
486
+ const readPaths = (args['readPaths'] as string[]).map((p) => validateDocumentPath(p));
487
+ const operations = args['operations'] as Array<{
488
+ type: 'set' | 'update' | 'delete';
489
+ path: string;
490
+ data?: Record<string, unknown>;
491
+ merge?: boolean;
492
+ }>;
493
+
494
+ const db = getFirestore();
495
+
496
+ const result = await db.runTransaction(async (transaction) => {
497
+ const reads: Record<string, unknown> = {};
498
+ for (const path of readPaths) {
499
+ const docSnap = await transaction.get(db.doc(path));
500
+ reads[path] = docSnap.exists ? { exists: true, data: docSnap.data() } : { exists: false };
501
+ }
502
+
503
+ for (const op of operations) {
504
+ const docPath = validateDocumentPath(op.path);
505
+ const ref = db.doc(docPath);
506
+
507
+ switch (op.type) {
508
+ case 'set': {
509
+ const data = sanitizeData(op.data) as Record<string, unknown>;
510
+ transaction.set(ref, {
511
+ ...data,
512
+ _updatedAt: FieldValue.serverTimestamp(),
513
+ }, { merge: op.merge ?? false });
514
+ break;
515
+ }
516
+ case 'update': {
517
+ const data = sanitizeData(op.data) as Record<string, unknown>;
518
+ transaction.update(ref, {
519
+ ...data,
520
+ _updatedAt: FieldValue.serverTimestamp(),
521
+ });
522
+ break;
523
+ }
524
+ case 'delete': {
525
+ transaction.delete(ref);
526
+ break;
527
+ }
528
+ }
529
+ }
530
+
531
+ return reads;
532
+ });
533
+
534
+ return formatSuccess({
535
+ readDocuments: result,
536
+ writesApplied: operations.length,
537
+ message: 'Transaction completed successfully.',
538
+ });
539
+ } catch (err) {
540
+ handleFirebaseError(err, 'firestore', 'transaction');
541
+ }
542
+ },
543
+ },
544
+
545
+ // ── firestore_list_collections ────────────────────────
546
+ {
547
+ name: 'firestore_list_collections',
548
+ description:
549
+ 'List subcollections of a document, or root-level collections if no document path is provided.',
550
+ inputSchema: {
551
+ type: 'object' as const,
552
+ properties: {
553
+ documentPath: { type: 'string', description: 'Optional document path to list its subcollections' },
554
+ },
555
+ },
556
+ handler: async (args: Record<string, unknown>) => {
557
+ try {
558
+ const db = getFirestore();
559
+ const docPath = args['documentPath'] as string | undefined;
560
+
561
+ let collections: FirebaseFirestore.CollectionReference[];
562
+ if (docPath) {
563
+ validateDocumentPath(docPath);
564
+ collections = await db.doc(docPath).listCollections();
565
+ } else {
566
+ collections = await db.listCollections();
567
+ }
568
+
569
+ const result = collections.map((col) => ({
570
+ id: col.id,
571
+ path: col.path,
572
+ }));
573
+
574
+ return formatSuccess(result);
575
+ } catch (err) {
576
+ handleFirebaseError(err, 'firestore', 'list_collections');
577
+ }
578
+ },
579
+ },
580
+
581
+ // ── firestore_list_subcollections ─────────────────────
582
+ {
583
+ name: 'firestore_list_subcollections',
584
+ description: 'List all subcollections of a specific document.',
585
+ inputSchema: {
586
+ type: 'object' as const,
587
+ properties: {
588
+ path: { type: 'string', description: 'Document path to inspect' },
589
+ },
590
+ required: ['path'],
591
+ },
592
+ handler: async (args: Record<string, unknown>) => {
593
+ try {
594
+ const docPath = validateDocumentPath(args['path'] as string);
595
+ const db = getFirestore();
596
+ const collections = await db.doc(docPath).listCollections();
597
+
598
+ const result = collections.map((col) => ({
599
+ id: col.id,
600
+ path: col.path,
601
+ }));
602
+
603
+ return formatSuccess({
604
+ documentPath: docPath,
605
+ subcollections: result,
606
+ });
607
+ } catch (err) {
608
+ handleFirebaseError(err, 'firestore', 'list_subcollections');
609
+ }
610
+ },
611
+ },
612
+
613
+ // ── firestore_aggregate_query ─────────────────────────
614
+ {
615
+ name: 'firestore_aggregate_query',
616
+ description:
617
+ 'Run aggregation queries (count, sum, average) on Firestore collections with optional filters.',
618
+ inputSchema: {
619
+ type: 'object' as const,
620
+ properties: {
621
+ collection: { type: 'string', description: 'Collection path' },
622
+ aggregations: {
623
+ type: 'array',
624
+ items: {
625
+ type: 'object',
626
+ properties: {
627
+ type: { type: 'string', enum: ['count', 'sum', 'avg'], description: 'Aggregation type' },
628
+ field: { type: 'string', description: 'Field to aggregate (not needed for count)' },
629
+ alias: { type: 'string', description: 'Alias for the result' },
630
+ },
631
+ required: ['type'],
632
+ },
633
+ description: 'Aggregation operations to perform',
634
+ },
635
+ where: {
636
+ type: 'array',
637
+ items: {
638
+ type: 'object',
639
+ properties: {
640
+ field: { type: 'string' },
641
+ operator: { type: 'string' },
642
+ value: {},
643
+ },
644
+ required: ['field', 'operator', 'value'],
645
+ },
646
+ description: 'Optional where clauses to filter before aggregating',
647
+ },
648
+ },
649
+ required: ['collection', 'aggregations'],
650
+ },
651
+ handler: async (args: Record<string, unknown>) => {
652
+ try {
653
+ const collectionPath = validateCollectionPath(args['collection'] as string);
654
+ const aggregations = args['aggregations'] as Array<{
655
+ type: 'count' | 'sum' | 'avg';
656
+ field?: string;
657
+ alias?: string;
658
+ }>;
659
+ const whereClauses = (args['where'] as Array<{ field: string; operator: string; value: unknown }>) || [];
660
+
661
+ const db = getFirestore();
662
+ let query: Query = db.collection(collectionPath);
663
+
664
+ for (const w of whereClauses) {
665
+ const field = validateWhereField(w.field);
666
+ const op = validateOperator(w.operator);
667
+ query = query.where(field, op as WhereFilterOp, w.value);
668
+ }
669
+
670
+ const aggregateSpec: AggregateSpec = {};
671
+ for (const agg of aggregations) {
672
+ const alias = agg.alias || `${agg.type}_${agg.field || 'all'}`;
673
+ switch (agg.type) {
674
+ case 'count':
675
+ aggregateSpec[alias] = AggregateField.count();
676
+ break;
677
+ case 'sum':
678
+ if (!agg.field) throw new Error('Sum aggregation requires a "field" parameter.');
679
+ aggregateSpec[alias] = AggregateField.sum(agg.field);
680
+ break;
681
+ case 'avg':
682
+ if (!agg.field) throw new Error('Average aggregation requires a "field" parameter.');
683
+ aggregateSpec[alias] = AggregateField.average(agg.field);
684
+ break;
685
+ }
686
+ }
687
+
688
+ const snapshot = await query.aggregate(aggregateSpec).get();
689
+ const results: Record<string, unknown> = {};
690
+ for (const agg of aggregations) {
691
+ const alias = agg.alias || `${agg.type}_${agg.field || 'all'}`;
692
+ results[alias] = snapshot.data()[alias];
693
+ }
694
+
695
+ return formatSuccess(results);
696
+ } catch (err) {
697
+ handleFirebaseError(err, 'firestore', 'aggregate_query');
698
+ }
699
+ },
700
+ },
701
+
702
  // ── firestore_listen_changes ──────────────────────────
  {
    name: 'firestore_listen_changes',
    description:
      'Listen for real-time changes on a Firestore collection or document for a short duration.',
    inputSchema: {
      type: 'object' as const,
      properties: {
        path: { type: 'string', description: 'Collection or document path to watch' },
        durationMs: { type: 'number', description: 'Duration to listen in milliseconds (1000-30000, default 5000)' },
      },
      required: ['path'],
    },
    // Attaches an onSnapshot listener to the given path for a bounded window
    // (clamped to 1-30 s), collects every change event that arrives, then
    // detaches and returns the collected events. Listener errors are recorded
    // as 'error' entries in the change list rather than thrown.
    handler: async (args: Record<string, unknown>) => {
      try {
        const path = (args['path'] as string).trim();
        // Clamp the listen window to [1000, 30000] ms; default 5000.
        const durationMs = Math.min(Math.max((args['durationMs'] as number) || 5000, 1000), 30000);

        const db = getFirestore();
        const segments = path.split('/');
        // Accumulated change events; `duration` is ms elapsed since listening began.
        const changes: Array<{
          type: string;
          path: string;
          data?: unknown;
          duration: number;
        }> = [];

        let unsubscribe: (() => void) | undefined;

        const startTime = Date.now();

        // The promise resolves only via the timer below. If a validate* call
        // throws inside the executor, the promise rejects and the outer catch
        // handles it — no listener or timer has been set up at that point.
        await new Promise<void>((resolve) => {
          if (segments.length % 2 === 0) {
            // Even number of segments → a document path (collection/doc pairs).
            validateDocumentPath(path);
            unsubscribe = db.doc(path).onSnapshot(
              (snap) => {
                changes.push({
                  // A snapshot for a non-existent doc means it was removed.
                  type: snap.exists ? 'modified' : 'removed',
                  path: snap.ref.path,
                  data: snap.data(),
                  duration: Date.now() - startTime,
                });
              },
              (err) => {
                // Listener errors are reported in-band, not thrown.
                changes.push({
                  type: 'error',
                  path,
                  data: err.message,
                  duration: Date.now() - startTime,
                });
              }
            );
          } else {
            // Odd number of segments → a collection path.
            validateCollectionPath(path);
            unsubscribe = db.collection(path).onSnapshot(
              (snap) => {
                // Record each per-document delta ('added' | 'modified' | 'removed').
                snap.docChanges().forEach((change) => {
                  changes.push({
                    type: change.type,
                    path: change.doc.ref.path,
                    data: change.doc.data(),
                    duration: Date.now() - startTime,
                  });
                });
              },
              (err) => {
                changes.push({
                  type: 'error',
                  path,
                  data: err.message,
                  duration: Date.now() - startTime,
                });
              }
            );
          }

          // Stop listening and settle the promise after the window elapses.
          setTimeout(() => {
            if (unsubscribe) unsubscribe();
            resolve();
          }, durationMs);
        });

        return formatSuccess({
          path,
          listenedForMs: durationMs,
          changesDetected: changes.length,
          changes,
        });
      } catch (err) {
        handleFirebaseError(err, 'firestore', 'listen_changes');
      }
    },
  },
795
+
796
+ // ── firestore_infer_schema ────────────────────────────
797
+ {
798
+ name: 'firestore_infer_schema',
799
+ description:
800
+ 'Infer the schema of a Firestore collection by sampling documents. Returns field names, types, and occurrence counts.',
801
+ inputSchema: {
802
+ type: 'object' as const,
803
+ properties: {
804
+ collection: { type: 'string', description: 'Collection path to analyze' },
805
+ sampleSize: { type: 'number', description: 'Number of documents to sample (1-100, default 20)' },
806
+ },
807
+ required: ['collection'],
808
+ },
809
+ handler: async (args: Record<string, unknown>) => {
810
+ try {
811
+ const collectionPath = validateCollectionPath(args['collection'] as string);
812
+ const sampleSize = Math.min(Math.max((args['sampleSize'] as number) || 20, 1), 100);
813
+
814
+ const cacheKey = `schema:${collectionPath}`;
815
+ const cached = schemaCache.get(cacheKey);
816
+ if (cached) {
817
+ return formatSuccess(cached);
818
+ }
819
+
820
+ const db = getFirestore();
821
+ const snapshot = await db.collection(collectionPath).limit(sampleSize).get();
822
+
823
+ if (snapshot.empty) {
824
+ return formatSuccess({
825
+ collection: collectionPath,
826
+ documentCount: 0,
827
+ message: 'Collection is empty. No schema to infer.',
828
+ });
829
+ }
830
+
831
+ const fieldStats: Record<string, {
832
+ types: Record<string, number>;
833
+ count: number;
834
+ nullable: boolean;
835
+ sampleValues: unknown[];
836
+ }> = {};
837
+
838
+ for (const doc of snapshot.docs) {
839
+ const data = doc.data();
840
+ inferFields(data, '', fieldStats, 0);
841
+ }
842
+
843
+ const totalDocs = snapshot.size;
844
+ const schema: Record<string, unknown> = {};
845
+ for (const [field, stats] of Object.entries(fieldStats)) {
846
+ const dominantType = Object.entries(stats.types)
847
+ .sort((a, b) => b[1] - a[1])[0]![0];
848
+ schema[field] = {
849
+ type: dominantType,
850
+ presence: `${((stats.count / totalDocs) * 100).toFixed(1)}%`,
851
+ nullable: stats.nullable,
852
+ sampleValues: stats.sampleValues.slice(0, 3),
853
+ };
854
+ }
855
+
856
+ const result = {
857
+ collection: collectionPath,
858
+ documentsSampled: totalDocs,
859
+ fields: schema,
860
+ };
861
+
862
+ schemaCache.set(cacheKey, result, 10 * 60 * 1000);
863
+ return formatSuccess(result);
864
+ } catch (err) {
865
+ handleFirebaseError(err, 'firestore', 'infer_schema');
866
+ }
867
+ },
868
+ },
869
+ ];
870
+
871
+ // ── Helpers ──────────────────────────────────────────────
872
+
873
+ async function deleteDocumentRecursive(db: Firestore, docRef: FirebaseFirestore.DocumentReference): Promise<void> {
874
+ const collections = await docRef.listCollections();
875
+ for (const col of collections) {
876
+ const snapshot = await col.limit(500).get();
877
+ for (const doc of snapshot.docs) {
878
+ await deleteDocumentRecursive(db, doc.ref);
879
+ }
880
+ }
881
+ await docRef.delete();
882
+ }
883
+
884
+ function inferFields(
885
+ data: unknown,
886
+ prefix: string,
887
+ fieldStats: Record<string, { types: Record<string, number>; count: number; nullable: boolean; sampleValues: unknown[] }>,
888
+ depth: number
889
+ ): void {
890
+ if (depth > 10) return;
891
+
892
+ if (data === null || data === undefined) return;
893
+
894
+ if (typeof data !== 'object' || Array.isArray(data)) return;
895
+
896
+ for (const [key, value] of Object.entries(data as Record<string, unknown>)) {
897
+ if (key.startsWith('_')) continue;
898
+ const fieldPath = prefix ? `${prefix}.${key}` : key;
899
+
900
+ if (!fieldStats[fieldPath]) {
901
+ fieldStats[fieldPath] = { types: {}, count: 0, nullable: false, sampleValues: [] };
902
+ }
903
+
904
+ const stats = fieldStats[fieldPath]!;
905
+ stats.count++;
906
+
907
+ if (value === null || value === undefined) {
908
+ stats.nullable = true;
909
+ stats.types['null'] = (stats.types['null'] || 0) + 1;
910
+ } else if (Array.isArray(value)) {
911
+ stats.types['array'] = (stats.types['array'] || 0) + 1;
912
+ if (stats.sampleValues.length < 3) {
913
+ stats.sampleValues.push(value.slice(0, 2));
914
+ }
915
+ } else if (value instanceof Date) {
916
+ stats.types['timestamp'] = (stats.types['timestamp'] || 0) + 1;
917
+ if (stats.sampleValues.length < 3) {
918
+ stats.sampleValues.push(value.toISOString());
919
+ }
920
+ } else if (typeof value === 'object') {
921
+ stats.types['map'] = (stats.types['map'] || 0) + 1;
922
+ inferFields(value, fieldPath, fieldStats, depth + 1);
923
+ } else {
924
+ const type = typeof value;
925
+ stats.types[type] = (stats.types[type] || 0) + 1;
926
+ if (stats.sampleValues.length < 3) {
927
+ stats.sampleValues.push(value);
928
+ }
929
+ }
930
+ }
931
+ }