fmea-api-mcp-server 1.1.0 → 1.1.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js CHANGED
@@ -6,8 +6,57 @@ import * as fs from "fs/promises";
  import * as fsSync from "fs";
  import * as path from "path";
  import { fileURLToPath } from "url";
+ import { getSynonyms } from "./synonyms.js";
  const __filename = fileURLToPath(import.meta.url);
  const __dirname = path.dirname(__filename);
+ /**
+ * Normalize token to singular form for better matching.
+ * Handles common English plural patterns.
+ */
+ function normalizeToken(token) {
+ const normalized = [token];
+ // Simple pluralization rules (token -> singular or singular -> plural)
+ const rules = [
+ // Remove 's' for common plurals
+ [/([a-z]+)s$/, '$1'],
+ // Remove 'es' for words ending with s, x, z, ch, sh
+ [/([a-z]+)es$/, '$1'],
+ // Remove 'ies' and replace with 'y'
+ [/([a-z]+)ies$/, '$1y'],
+ // Remove 'ves' and replace with 'f' (leaves -> leaf)
+ [/([a-z]+)ves$/, '$1f'],
+ // Remove 'men' and replace with 'man' (women -> woman)
+ [/([a-z]+)men$/, '$1man'],
+ ];
+ // Generate singular variants
+ for (const [pattern, replacement] of rules) {
+ if (pattern.test(token)) {
+ const singular = token.replace(pattern, replacement);
+ if (singular !== token && singular.length > 2) {
+ normalized.push(singular);
+ }
+ break;
+ }
+ }
+ // Add plural variant (append 's' if not ending with 's')
+ if (!token.endsWith('s') && token.length > 2) {
+ normalized.push(token + 's');
+ }
+ return normalized;
+ }
+ /**
+ * Expand tokens to include singular/plural variants
+ */
+ function expandTokenVariants(tokens) {
+ const variants = new Set(tokens);
+ for (const token of tokens) {
+ const normalized = normalizeToken(token);
+ for (const variant of normalized) {
+ variants.add(variant);
+ }
+ }
+ return variants;
+ }
  // Directory where endpoint definitions are stored.
  // Priority:
  // 1. Environment variable ENDPOINTS_DIR
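
For orientation, a minimal sketch of what the new helpers return for a few hypothetical tokens (illustrative only, not part of the package; the first matching rule wins, then a plural variant may be appended):

    normalizeToken("projects");   // -> ["projects", "project"]      (generic trailing-'s' rule)
    normalizeToken("categories"); // -> ["categories", "categorie"]  (the generic 's' rule matches before the 'ies' rule)
    normalizeToken("user");       // -> ["user", "users"]            (plural variant appended)
    expandTokenVariants(["list", "users"]);
    // -> Set { "list", "users", "lists", "user" }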
@@ -119,7 +168,7 @@ class ApiDocsServer {
  description: "Page number for pagination (default: 1).",
  },
  },
- required: ["query"],
+ required: [],
  },
  },
  {
@@ -145,10 +194,21 @@ class ApiDocsServer {
  });
  this.server.setRequestHandler(CallToolRequestSchema, async (request) => {
  if (request.params.name === "search_apis") {
- const query = String(request.params.arguments?.query).toLowerCase();
+ const query = request.params.arguments?.query ? String(request.params.arguments?.query).toLowerCase() : "";
  const method = request.params.arguments?.method ? String(request.params.arguments?.method).toUpperCase() : undefined;
  const version = request.params.arguments?.version ? String(request.params.arguments?.version).toLowerCase() : undefined;
  const page = request.params.arguments?.page ? Number(request.params.arguments?.page) : 1;
+ if (!query) {
+ const categories = await this.listApiCategories();
+ return {
+ content: [
+ {
+ type: "text",
+ text: JSON.stringify(categories, null, 2),
+ },
+ ],
+ };
+ }
  const results = await this.searchInFiles(query, method, version, page);
  return {
  content: [
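
Since 'query' is no longer required, calling 'search_apis' without it now returns the category listing instead of failing. A sketch of such a call (argument values are illustrative):

    // tools/call arguments for search_apis with no query
    const args = {};   // or { page: 1 }; the page argument is not used for the category listing
    // -> the handler returns JSON.stringify(await this.listApiCategories(), null, 2) as text content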
@@ -210,13 +270,43 @@ class ApiDocsServer {
  }
  return results;
  }
- // Smart search helper with scoring, filtering, limits, and pagination
+ // Helper to list API categories (directories) for exploration
+ async listApiCategories() {
+ const categories = [];
+ try {
+ const topLevel = await fs.readdir(ENDPOINTS_DIR);
+ for (const versionDir of topLevel) {
+ const versionPath = path.join(ENDPOINTS_DIR, versionDir);
+ const stat = await fs.stat(versionPath);
+ if (stat.isDirectory() && !versionDir.startsWith('.')) {
+ // Look one level deeper for domains (e.g. v1/projects)
+ const domains = await fs.readdir(versionPath);
+ for (const domain of domains) {
+ const domainPath = path.join(versionPath, domain);
+ const domainStat = await fs.stat(domainPath);
+ if (domainStat.isDirectory()) {
+ categories.push({
+ category: `${versionDir}/${domain}`,
+ description: `API endpoints for ${domain} (${versionDir})`
+ });
+ }
+ }
+ }
+ }
+ }
+ catch (e) {
+ console.error("Error listing categories:", e);
+ }
+ return {
+ categories: categories,
+ message: "Use 'search_apis' with a query to find specific endpoints, or 'get_api_details' to see full schemas."
+ };
+ }
+ // Smart search helper with BM25 scoring, Synonyms, and AND logic
  async searchInFiles(query, filterMethod, filterVersion, page = 1) {
  const files = await this.getAllFiles(ENDPOINTS_DIR);
- let allMatches = [];
- const isWildcard = query.trim() === "*" || query.trim() === "";
- // Tokenize query: split by space, filter empty
- const tokens = query.toLowerCase().split(/\s+/).filter(t => t.length > 0);
+ let documents = [];
+ // 1. Prepare Documents (Corpus)
  for (const filePath of files) {
  try {
  const content = await fs.readFile(filePath, "utf-8");
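
listApiCategories assumes a two-level layout under ENDPOINTS_DIR: a version directory, then a domain directory. A sketch of a hypothetical layout and the entries it would produce:

    // ENDPOINTS_DIR/
    //   v1/
    //     projects/  -> { category: "v1/projects", description: "API endpoints for projects (v1)" }
    //     auth/      -> { category: "v1/auth", description: "API endpoints for auth (v1)" }
    //   v2/
    //     projects/  -> { category: "v2/projects", description: "API endpoints for projects (v2)" }
    // Plain files under a version directory are skipped; only subdirectories become categories.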
@@ -235,49 +325,23 @@ class ApiDocsServer {
  if (filterMethod && endpoint.method.toUpperCase() !== filterMethod) {
  continue;
  }
- // Scoring Logic
- let score = 0;
- if (isWildcard) {
- score = 1;
- }
- else {
- const summary = (endpoint.summary || "").toLowerCase();
- const description = (endpoint.description || "").toLowerCase();
- const apiPath = (endpoint.path || "").toLowerCase();
- const operationId = (endpoint.operationId || "").toLowerCase();
- // Calculate score for each token
- for (const token of tokens) {
- let tokenScore = 0;
- // Summary: Highest weight (Exact > Partial)
- if (summary === token)
- tokenScore += 20;
- else if (summary.includes(token))
- tokenScore += 10;
- // OperationID: High weight
- if (operationId === token)
- tokenScore += 15;
- else if (operationId.includes(token))
- tokenScore += 8;
- // Description: Medium weight
- if (description.includes(token))
- tokenScore += 5;
- // Path: Low weight
- if (apiPath.includes(token))
- tokenScore += 3;
- score += tokenScore;
- }
- }
- if (score > 0) {
- allMatches.push({
- score,
- file: fileName,
- method: endpoint.method,
- path: endpoint.path,
- summary: endpoint.summary,
- description: endpoint.description,
- operationId: endpoint.operationId
- });
- }
+ // Create searchable text blob
+ // Weighting: Summary (3x), OperationID (2x), Description (1x), Path (1x)
+ const searchableText = [
+ (endpoint.summary || "").toLowerCase().repeat(3),
+ (endpoint.operationId || "").toLowerCase().repeat(2),
+ (endpoint.description || "").toLowerCase(),
+ (endpoint.path || "").toLowerCase()
+ ].join(" ");
+ const tokens = searchableText.split(/\s+/).filter(t => t.length > 0);
+ // Expand document tokens with singular/plural variants
+ const expandedTokens = Array.from(expandTokenVariants(tokens));
+ documents.push({
+ file: fileName,
+ ...endpoint,
+ tokens: expandedTokens, // Expanded tokens for BM25 calculation
+ docLength: expandedTokens.length
+ });
  }
  }
  }
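
A sketch of how a single hypothetical endpoint becomes a BM25 document under the weighting above; note that String.prototype.repeat concatenates without a separator, so repeated field copies only split into extra tokens at existing whitespace:

    // endpoint: summary "List projects", operationId "listProjects", empty description, path "/api/v1/projects"
    const searchableText = [
        "list projects".repeat(3),  // "list projectslist projectslist projects"
        "listprojects".repeat(2),   // "listprojectslistprojects"
        "",
        "/api/v1/projects"
    ].join(" ");
    // tokens: ["list", "projectslist", "projectslist", "projects", "listprojectslistprojects", "/api/v1/projects"]
    // expandTokenVariants() then adds variants such as "lists" and "project" before scoring.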
@@ -285,50 +349,121 @@ class ApiDocsServer {
  // Ignore parse errors
  }
  }
- const totalFound = allMatches.length;
+ const totalFound = documents.length;
  if (totalFound === 0) {
  return {
  results: [],
- message: `No results found for '${query}'. Try using '*' to list all endpoints, or check your version/method filters.`
+ message: `No results found for '${query}'.`
+ };
+ }
+ // 2. Query Processing (Synonyms + AND Logic)
+ const rawQueryTokens = query.toLowerCase().split(/\s+/).filter(t => t.length > 0);
+ // Check for Wildcard
+ if (rawQueryTokens.length === 0 || (rawQueryTokens.length === 1 && rawQueryTokens[0] === "*")) {
+ // Slice for pagination
+ const LIMIT = 10;
+ const totalPages = Math.ceil(totalFound / LIMIT);
+ const currentPage = Math.max(1, page);
+ const start = (currentPage - 1) * LIMIT;
+ const slice = documents.slice(start, start + LIMIT);
+ const finalResults = slice.map(({ tokens, docLength, ...rest }) => rest);
+ return {
+ results: finalResults,
+ meta: { total: totalFound, page: currentPage, totalPages: totalPages }
+ };
+ }
+ // Filter Documents: AND Logic with Synonym Expansion + Plural/Singular Normalization
+ // Every query token (or one of its synonyms/plurals) MUST be present in the document
+ const filteredDocs = documents.filter(doc => {
+ return rawQueryTokens.every(qToken => {
+ // Get synonyms first
+ const synonyms = getSynonyms(qToken);
+ // Expand with plural/singular variants
+ const expandedQuery = Array.from(expandTokenVariants(synonyms));
+ return expandedQuery.some((variant) => doc.tokens.includes(variant));
+ });
+ });
+ if (filteredDocs.length === 0) {
+ return {
+ results: [],
+ message: `No results found for '${query}'. Try fewer keywords or check spelling.`
  };
  }
- // Sort by score descending (only meaningful if not wildcard)
- if (!isWildcard) {
- allMatches.sort((a, b) => b.score - a.score);
+ // 3. BM25 Calculation on Filtered Docs
+ // Context: should stats come from the *filtered* corpus or the *full* corpus?
+ // Standard BM25 usage computes IDF from the full corpus, so we use full corpus stats.
+ const k1 = 1.2;
+ const b = 0.75;
+ const avgdl = documents.reduce((acc, doc) => acc + doc.docLength, 0) / totalFound;
+ // Calculate IDF (using full corpus) for *expanded* tokens (synonyms + plurals)
+ // We expand query terms with both synonyms and plural/singular variants
+ const allQueryTerms = new Set();
+ rawQueryTokens.forEach(t => {
+ const synonyms = getSynonyms(t);
+ synonyms.forEach((s) => {
+ allQueryTerms.add(s);
+ // Also add plural/singular variants
+ const variants = normalizeToken(s);
+ variants.forEach(v => allQueryTerms.add(v));
+ });
+ });
+ const idf = {};
+ for (const term of allQueryTerms) {
+ let n_q = 0;
+ for (const doc of documents) {
+ if (doc.tokens.includes(term))
+ n_q++;
+ }
+ idf[term] = Math.log((totalFound - n_q + 0.5) / (n_q + 0.5) + 1);
  }
+ // Score Filtered Documents
+ let scoredDocs = filteredDocs.map(doc => {
+ let score = 0;
+ for (const qToken of rawQueryTokens) {
+ // Find which synonyms + plural variants of qToken are present in this doc
+ const synonyms = getSynonyms(qToken);
+ const expandedQuery = Array.from(expandTokenVariants(synonyms));
+ const presentTerms = expandedQuery.filter((term) => doc.tokens.includes(term));
+ // Sum up scores for all matching terms (synonyms + plurals)
+ for (const term of presentTerms) {
+ const f_q = doc.tokens.filter((t) => t === term).length;
+ const numerator = idf[term] * f_q * (k1 + 1);
+ const denominator = f_q + k1 * (1 - b + b * (doc.docLength / avgdl));
+ score += numerator / denominator;
+ }
+ }
+ return { ...doc, score };
+ });
+ // Sort by score descending
+ scoredDocs.sort((a, b) => b.score - a.score);
  // Pagination
  const LIMIT = 10;
- const totalPages = Math.ceil(totalFound / LIMIT);
+ const totalHits = scoredDocs.length;
+ const totalPages = Math.ceil(totalHits / LIMIT);
  const currentPage = Math.max(1, page);
  const start = (currentPage - 1) * LIMIT;
- const end = start + LIMIT;
- // Get the page slice
- const slice = allMatches.slice(start, end);
- // Post-processing: Add warnings for V1 endpoints if V2 exists
+ // Slice
+ const slice = scoredDocs.slice(start, start + LIMIT);
+ // Post-processing: Add warnings for V1 endpoints AND strip heavy fields
  const finalResults = await Promise.all(slice.map(async (item) => {
- // Remove score before returning to user
- const { score, ...rest } = item;
- if (rest.path && rest.path.includes("/v1/")) {
- const v2Path = rest.path.replace("/v1/", "/v2/");
- // We check if this v2 path exists using our internal lookup logic
- const v2Exists = await this.findEndpointInFiles(files, v2Path, rest.method);
- if (v2Exists) {
- rest.warning = "DEPRECATED: Version v1 is deprecated. Please use v2 endpoint: " + v2Path;
- }
+ // Deconstruct to remove heavy fields (parameters, requestBody, responses, tags) and internal scoring props
+ const { score, tokens, docLength, parameters, requestBody, responses, tags, file, ...lightweightItem } = item;
+ if (lightweightItem.path && lightweightItem.path.includes("/v1/")) {
+ // Check for V1 Deprecation
+ // Always generate a warning for v1 endpoints using the 3-step logic
+ // We do this check after determining it is a v1 endpoint
+ lightweightItem.warning = await this.generateDeprecationWarning(lightweightItem.path, lightweightItem.method);
  }
- return rest;
+ return lightweightItem;
  }));
  let warning = undefined;
  if (totalPages > 1) {
- warning = `Found ${totalFound} results. Showing page ${currentPage} of ${totalPages}.`;
- if (currentPage < totalPages) {
- warning += ` Use 'page: ${currentPage + 1}' to see next results.`;
- }
+ warning = `Found ${totalHits} results. Showing page ${currentPage} of ${totalPages}.`;
  }
  return {
  results: finalResults,
  meta: {
- total: totalFound,
+ total: totalHits,
  page: currentPage,
  totalPages: totalPages
  },
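
For reference, the per-term score computed above follows the standard BM25 form, with IDF taken from the full corpus as the comments note; t ranges over the synonym/plural expansions of the query tokens that appear in the document:

    score(D, q) = sum over t of  idf(t) * f(t, D) * (k1 + 1) / ( f(t, D) + k1 * (1 - b + b * |D| / avgdl) )
    idf(t)      = ln( (N - n_t + 0.5) / (n_t + 0.5) + 1 )
    where k1 = 1.2, b = 0.75, |D| = doc.docLength, avgdl = average docLength over all documents,
    N = totalFound (all documents), n_t = number of documents whose tokens include t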
@@ -349,17 +484,15 @@ class ApiDocsServer {
  continue;
  }
  const result = {
- sourceFile: path.relative(ENDPOINTS_DIR, filePath),
  ...endpoint
  };
  // Check for V1 Deprecation
  if (apiPath.includes("/v1/")) {
- const v2Path = apiPath.replace("/v1/", "/v2/");
- const v2Exists = await this.findEndpointInFiles(files, v2Path, method);
- if (v2Exists) {
- // Inject a top-level deprecation warning in the details
- result.deprecation_warning = `NOTICE: This v1 endpoint is deprecated. A newer version (v2) exists at ${v2Path}`;
- }
+ // Check for V1 Deprecation
+ // Always generate a warning for v1 endpoints using the 3-step logic
+ // We do this check regardless of whether a direct v2 exists or not,
+ // because generateDeprecationWarning handles all cases.
+ result.warning = await this.generateDeprecationWarning(apiPath, method);
  }
  return result;
  }
@@ -394,6 +527,43 @@ class ApiDocsServer {
  }
  return false;
  }
+ // 3-Step Intelligent Warning Logic
+ async generateDeprecationWarning(v1Path, method) {
+ const files = await this.getAllFiles(ENDPOINTS_DIR);
+ // Step 1: Direct Match (v1 -> v2)
+ const v2Path = v1Path.replace("/v1/", "/v2/");
+ const v2Exists = await this.findEndpointInFiles(files, v2Path, method);
+ if (v2Exists) {
+ return `DEPRECATED: Version v1 is deprecated. Please use v2 endpoint: ${v2Path}`;
+ }
+ // Step 2: Domain Hint (e.g. /api/v1/auth/login -> Check /api/v2/auth/)
+ // Extract domain: /api/v1/projects/... -> projects, /api/v1/auth/... -> auth
+ const match = v1Path.match(/\/api\/v1\/([^/]+)/);
+ if (match && match[1]) {
+ const domain = match[1];
+ const v2DomainPathStart = `/api/v2/${domain}`;
+ // Check if any v2 endpoint exists in this domain
+ let domainExists = false;
+ for (const filePath of files) {
+ try {
+ const content = await fs.readFile(filePath, "utf-8");
+ const json = JSON.parse(content);
+ if (json.endpoints && Array.isArray(json.endpoints)) {
+ if (json.endpoints.some((ep) => ep.path && ep.path.startsWith(v2DomainPathStart))) {
+ domainExists = true;
+ break;
+ }
+ }
+ }
+ catch (e) { }
+ }
+ if (domainExists) {
+ return `LEGACY: Direct v2 replacement not found, but newer '${domain}' related features exist in v2. Please search for '${domain}' in v2.`;
+ }
+ }
+ // Step 3: General Legacy Warning
+ return "LEGACY: This v1 endpoint is deprecated and may be removed in the future.";
+ }
  async run() {
  const transport = new StdioServerTransport();
  await this.server.connect(transport);
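
A sketch of the three possible outcomes for hypothetical v1 paths (the paths and the surrounding catalog contents are illustrative):

    // Step 1, direct match: /api/v1/projects/{id} where /api/v2/projects/{id} exists for the same method
    //   -> "DEPRECATED: Version v1 is deprecated. Please use v2 endpoint: /api/v2/projects/{id}"
    // Step 2, domain hint: /api/v1/auth/login with no direct v2 twin, but some endpoint under /api/v2/auth exists
    //   -> "LEGACY: Direct v2 replacement not found, but newer 'auth' related features exist in v2. Please search for 'auth' in v2."
    // Step 3, fallback: /api/v1/reports/export with no v2 counterpart and no v2 'reports' domain
    //   -> "LEGACY: This v1 endpoint is deprecated and may be removed in the future."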
package/dist/synonyms.js ADDED
@@ -0,0 +1,40 @@
+ export const SYNONYM_GROUPS = {
+ // Read / Retrieve
+ "get": ["fetch", "retrieve", "read", "load", "find", "search", "query", "list", "show"],
+ "find": ["get", "search", "retrieve", "lookup", "show"],
+ "search": ["find", "get", "query", "lookup", "show"],
+ "show": ["get", "display", "view", "fetch", "find"],
+ "list": ["get", "all", "collection", "show", "summary"],
+ "summary": ["list", "all", "overview", "collection"],
+ // Create
+ "create": ["add", "insert", "make", "new", "post", "generate"],
+ "add": ["create", "insert", "append", "attach"],
+ "post": ["create", "add", "submit"],
+ // Update
+ "update": ["modify", "edit", "change", "save", "put", "patch", "set"],
+ "modify": ["update", "edit", "change", "adjust"],
+ "save": ["update", "store", "persist", "write"],
+ // Delete
+ "delete": ["remove", "destroy", "clear", "erase", "drop"],
+ "remove": ["delete", "detach", "discard"]
+ };
+ /**
+ * Expands a single token into a list of synonyms including itself.
+ */
+ export function getSynonyms(token) {
+ const lowerToken = token.toLowerCase();
+ // Direct lookup
+ if (SYNONYM_GROUPS[lowerToken]) {
+ return [lowerToken, ...SYNONYM_GROUPS[lowerToken]];
+ }
+ // Reverse lookup (inefficient but thorough for a small map)
+ const synonyms = new Set();
+ synonyms.add(lowerToken);
+ for (const [key, details] of Object.entries(SYNONYM_GROUPS)) {
+ if (details.includes(lowerToken)) {
+ synonyms.add(key);
+ details.forEach(d => synonyms.add(d));
+ }
+ }
+ return Array.from(synonyms);
+ }
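
A quick sketch of getSynonyms lookups against the map above (results follow Set insertion order; the query terms are illustrative):

    getSynonyms("delete");
    // direct hit -> ["delete", "remove", "destroy", "clear", "erase", "drop"]
    getSynonyms("fetch");
    // reverse lookup: "fetch" appears in the "get" and "show" groups
    // -> ["fetch", "get", "retrieve", "read", "load", "find", "search", "query", "list", "show", "display", "view"]
    getSynonyms("unknownterm");
    // no matches -> ["unknownterm"]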
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "fmea-api-mcp-server",
- "version": "1.1.0",
+ "version": "1.1.2",
  "description": "MCP server for serving API documentation from endpoints directory",
  "type": "module",
  "main": "dist/index.js",