botvisibility 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,718 @@
1
+ import { RepoCheckResult } from './types.js';
2
+ import * as fs from 'fs';
3
+ import * as path from 'path';
4
+
5
+ // Recursive file finder
6
+ function findFiles(dir: string, pattern: RegExp, results: string[] = []): string[] {
7
+ try {
8
+ const items = fs.readdirSync(dir);
9
+ for (const item of items) {
10
+ const fullPath = path.join(dir, item);
11
+ const stat = fs.statSync(fullPath);
12
+
13
+ if (stat.isDirectory()) {
14
+ if (!['node_modules', '.git', 'dist', 'build', '.next', 'vendor'].includes(item)) {
15
+ findFiles(fullPath, pattern, results);
16
+ }
17
+ } else if (pattern.test(item)) {
18
+ results.push(fullPath);
19
+ }
20
+ }
21
+ } catch {
22
+ // Skip unreadable directories
23
+ }
24
+ return results;
25
+ }
26
+
27
+ // Search file content
28
+ function searchInFiles(dir: string, pattern: RegExp, filePattern: RegExp): Array<{file: string, line: number, content: string}> {
29
+ const matches: Array<{file: string, line: number, content: string}> = [];
30
+ const files = findFiles(dir, filePattern);
31
+
32
+ for (const file of files) {
33
+ try {
34
+ const content = fs.readFileSync(file, 'utf-8');
35
+ const lines = content.split('\n');
36
+ for (let i = 0; i < lines.length; i++) {
37
+ if (pattern.test(lines[i])) {
38
+ matches.push({
39
+ file,
40
+ line: i + 1,
41
+ content: lines[i].trim().slice(0, 100)
42
+ });
43
+ }
44
+ }
45
+ } catch {
46
+ // Skip unreadable files
47
+ }
48
+ }
49
+
50
+ return matches;
51
+ }
52
+
53
// Extensions treated as source code — used by checks that scan implementation files.
const codeFiles = /\.(ts|js|py|rb|go|java|php)$/;
// Broader set (code + config + docs) — used by checks that also scan metadata files.
const allFiles = /\.(ts|js|py|rb|go|java|php|json|ya?ml|md|toml)$/;
55
+
56
+ // --- Supplemental checks (kept from original, remapped IDs) ---
57
+
58
+ export function checkOpenApiFiles(repoPath: string): RepoCheckResult {
59
+ const specFiles = findFiles(repoPath, /\b(openapi|swagger)\.(json|ya?ml)$/i);
60
+
61
+ if (specFiles.length > 0) {
62
+ return {
63
+ id: 'repo-1.3', name: 'OpenAPI Spec Files', passed: true, status: 'pass', level: 1, category: 'Discoverable', autoDetectable: true,
64
+ message: `Found ${specFiles.length} OpenAPI/Swagger spec file(s)`,
65
+ filePath: specFiles[0],
66
+ details: specFiles.map(f => path.relative(repoPath, f)).join(', ')
67
+ };
68
+ }
69
+
70
+ return {
71
+ id: 'repo-1.3', name: 'OpenAPI Spec Files', passed: false, status: 'fail', level: 1, category: 'Discoverable', autoDetectable: true,
72
+ message: 'No OpenAPI/Swagger spec files found in repo',
73
+ recommendation: 'Create an openapi.json or openapi.yaml file defining your API'
74
+ };
75
+ }
76
+
77
+ export function checkRateLimitMiddleware(repoPath: string): RepoCheckResult {
78
+ const patterns = [/rate.?limit/i, /throttle/i, /RateLimiter/i, /express-rate-limit/i, /slowapi/i, /ratelimit/i];
79
+ const allMatches: Array<{file: string, line: number, content: string}> = [];
80
+
81
+ for (const pattern of patterns) {
82
+ allMatches.push(...searchInFiles(repoPath, pattern, codeFiles));
83
+ }
84
+
85
+ if (allMatches.length > 0) {
86
+ return {
87
+ id: 'repo-3.5', name: 'Rate Limit Middleware', passed: true, status: 'pass', level: 3, category: 'Optimized', autoDetectable: true,
88
+ message: `Found rate limiting in ${allMatches.length} location(s)`,
89
+ details: allMatches.slice(0, 3).map(m => `${path.relative(repoPath, m.file)}:${m.line}`).join(', ')
90
+ };
91
+ }
92
+
93
+ return {
94
+ id: 'repo-3.5', name: 'Rate Limit Middleware', passed: false, status: 'fail', level: 3, category: 'Optimized', autoDetectable: true,
95
+ message: 'No rate limiting middleware found',
96
+ recommendation: 'Add rate limiting to protect your API and return proper headers'
97
+ };
98
+ }
99
+
100
+ export function checkErrorPatterns(repoPath: string): RepoCheckResult {
101
+ const goodPatterns = [/error.?code/i, /error_code/i, /"error"\s*:\s*\{/i, /json\s*\(\s*\{\s*"?error/i, /JsonResponse.*error/i];
102
+ const allMatches: Array<{file: string, line: number, content: string}> = [];
103
+
104
+ for (const pattern of goodPatterns) {
105
+ allMatches.push(...searchInFiles(repoPath, pattern, codeFiles));
106
+ }
107
+
108
+ if (allMatches.length >= 3) {
109
+ return {
110
+ id: 'repo-2.7', name: 'Structured Error Responses', passed: true, status: 'pass', level: 2, category: 'Usable', autoDetectable: true,
111
+ message: `Found structured error patterns in ${allMatches.length} location(s)`,
112
+ details: 'Code uses consistent error response structure'
113
+ };
114
+ } else if (allMatches.length > 0) {
115
+ return {
116
+ id: 'repo-2.7', name: 'Structured Error Responses', passed: false, status: 'partial', level: 2, category: 'Usable', autoDetectable: true,
117
+ message: `Found some error patterns (${allMatches.length})`,
118
+ recommendation: 'Ensure all errors return structured JSON with error codes'
119
+ };
120
+ }
121
+
122
+ return {
123
+ id: 'repo-2.7', name: 'Structured Error Responses', passed: false, status: 'fail', level: 2, category: 'Usable', autoDetectable: true,
124
+ message: 'No structured error patterns found',
125
+ recommendation: 'Add consistent error response format: { "error": { "code": "...", "message": "..." } }'
126
+ };
127
+ }
128
+
129
+ export function checkIdempotencyKeys(repoPath: string): RepoCheckResult {
130
+ const patterns = [/idempoten/i, /Idempotency.?Key/i, /idempotency_key/i, /x-idempotency/i];
131
+ const allMatches: Array<{file: string, line: number, content: string}> = [];
132
+
133
+ for (const pattern of patterns) {
134
+ allMatches.push(...searchInFiles(repoPath, pattern, codeFiles));
135
+ }
136
+
137
+ if (allMatches.length > 0) {
138
+ return {
139
+ id: 'repo-2.9', name: 'Idempotency Keys', passed: true, status: 'pass', level: 2, category: 'Usable', autoDetectable: true,
140
+ message: `Found idempotency handling in ${allMatches.length} location(s)`,
141
+ details: allMatches.slice(0, 2).map(m => `${path.relative(repoPath, m.file)}:${m.line}`).join(', ')
142
+ };
143
+ }
144
+
145
+ return {
146
+ id: 'repo-2.9', name: 'Idempotency Keys', passed: false, status: 'fail', level: 2, category: 'Usable', autoDetectable: true,
147
+ message: 'No idempotency key handling found',
148
+ recommendation: 'Support Idempotency-Key header for write operations'
149
+ };
150
+ }
151
+
152
+ export function checkStreamingEndpoints(repoPath: string): RepoCheckResult {
153
+ const patterns = [/server.?sent.?event/i, /EventSource/i, /text\/event-stream/i, /websocket/i, /socket\.io/i, /streaming/i, /stream.?response/i];
154
+ const allMatches: Array<{file: string, line: number, content: string}> = [];
155
+
156
+ for (const pattern of patterns) {
157
+ allMatches.push(...searchInFiles(repoPath, pattern, codeFiles));
158
+ }
159
+
160
+ if (allMatches.length > 0) {
161
+ return {
162
+ id: 'repo-3.6', name: 'Streaming/Event Endpoints', passed: true, status: 'pass', level: 3, category: 'Optimized', autoDetectable: true,
163
+ message: `Found streaming patterns in ${allMatches.length} location(s)`,
164
+ details: allMatches.slice(0, 2).map(m => `${path.relative(repoPath, m.file)}:${m.line}`).join(', ')
165
+ };
166
+ }
167
+
168
+ return {
169
+ id: 'repo-3.6', name: 'Streaming/Event Endpoints', passed: false, status: 'fail', level: 3, category: 'Optimized', autoDetectable: true,
170
+ message: 'No streaming/event endpoints found',
171
+ recommendation: 'Consider adding SSE or WebSocket for real-time updates'
172
+ };
173
+ }
174
+
175
+ // --- Level 4: Agent-Native checks ---
176
+
177
+ export function checkIntentEndpoints(repoPath: string): RepoCheckResult {
178
+ // Search for intent-based/action-oriented route patterns
179
+ const patterns = [
180
+ /\/(send|process|execute|submit|trigger|run|perform|generate|analyze|convert|export|import|sync|verify|validate|approve|reject|cancel|refund)[_-]?\w+/i,
181
+ /router\.(post|put)\s*\(\s*['"]\/(send|process|execute|submit|trigger)/i,
182
+ /app\.(post|put)\s*\(\s*['"]\/(send|process|execute|submit|trigger)/i,
183
+ /path\s*=\s*['"]\/(send|process|execute|submit|trigger)/i,
184
+ ];
185
+
186
+ const allMatches: Array<{file: string, line: number, content: string}> = [];
187
+ for (const pattern of patterns) {
188
+ allMatches.push(...searchInFiles(repoPath, pattern, codeFiles));
189
+ }
190
+
191
+ if (allMatches.length >= 2) {
192
+ const uniqueFiles = [...new Set(allMatches.map(m => m.file))];
193
+ return {
194
+ id: '4.1', name: 'Intent-Based Endpoints', passed: true, status: 'pass', level: 4, category: 'Agent-Native', autoDetectable: true,
195
+ message: `Found intent-based endpoints in ${uniqueFiles.length} file(s)`,
196
+ details: allMatches.slice(0, 3).map(m => `${path.relative(repoPath, m.file)}:${m.line}`).join(', ')
197
+ };
198
+ } else if (allMatches.length === 1) {
199
+ return {
200
+ id: '4.1', name: 'Intent-Based Endpoints', passed: false, status: 'partial', level: 4, category: 'Agent-Native', autoDetectable: true,
201
+ message: 'Found some intent-based patterns',
202
+ recommendation: 'Add more high-level intent endpoints (e.g., /send-invoice, /process-payment) alongside CRUD'
203
+ };
204
+ }
205
+
206
+ return {
207
+ id: '4.1', name: 'Intent-Based Endpoints', passed: false, status: 'fail', level: 4, category: 'Agent-Native', autoDetectable: true,
208
+ message: 'No intent-based endpoints found',
209
+ recommendation: 'Add high-level intent endpoints (e.g., /send-invoice, /process-payment) alongside CRUD'
210
+ };
211
+ }
212
+
213
+ export function checkAgentSessions(repoPath: string): RepoCheckResult {
214
+ const patterns = [
215
+ /agent.?session/i,
216
+ /session.?context/i,
217
+ /conversation.?id/i,
218
+ /thread.?id/i,
219
+ /agent.?context/i,
220
+ /persistent.?session/i,
221
+ /session.?store/i,
222
+ /x-session-id/i,
223
+ /x-agent-session/i,
224
+ ];
225
+
226
+ const allMatches: Array<{file: string, line: number, content: string}> = [];
227
+ for (const pattern of patterns) {
228
+ allMatches.push(...searchInFiles(repoPath, pattern, allFiles));
229
+ }
230
+
231
+ if (allMatches.length > 0) {
232
+ const uniqueFiles = [...new Set(allMatches.map(m => m.file))];
233
+ return {
234
+ id: '4.2', name: 'Agent Sessions', passed: true, status: 'pass', level: 4, category: 'Agent-Native', autoDetectable: true,
235
+ message: `Found agent session patterns in ${uniqueFiles.length} file(s)`,
236
+ details: allMatches.slice(0, 3).map(m => `${path.relative(repoPath, m.file)}:${m.line}`).join(', ')
237
+ };
238
+ }
239
+
240
+ return {
241
+ id: '4.2', name: 'Agent Sessions', passed: false, status: 'fail', level: 4, category: 'Agent-Native', autoDetectable: true,
242
+ message: 'No agent session management found',
243
+ recommendation: 'Implement persistent sessions with context for multi-step agent interactions'
244
+ };
245
+ }
246
+
247
+ export function checkScopedAgentTokens(repoPath: string): RepoCheckResult {
248
+ const patterns = [
249
+ /agent.?token/i,
250
+ /agent.?scope/i,
251
+ /agent.?key/i,
252
+ /agent.?credential/i,
253
+ /capability.?limit/i,
254
+ /scoped.?token/i,
255
+ /agent.?permission/i,
256
+ /agent.?role/i,
257
+ /x-agent-token/i,
258
+ ];
259
+
260
+ const allMatches: Array<{file: string, line: number, content: string}> = [];
261
+ for (const pattern of patterns) {
262
+ allMatches.push(...searchInFiles(repoPath, pattern, allFiles));
263
+ }
264
+
265
+ if (allMatches.length > 0) {
266
+ const uniqueFiles = [...new Set(allMatches.map(m => m.file))];
267
+ return {
268
+ id: '4.3', name: 'Scoped Agent Tokens', passed: true, status: 'pass', level: 4, category: 'Agent-Native', autoDetectable: true,
269
+ message: `Found agent token patterns in ${uniqueFiles.length} file(s)`,
270
+ details: allMatches.slice(0, 3).map(m => `${path.relative(repoPath, m.file)}:${m.line}`).join(', ')
271
+ };
272
+ }
273
+
274
+ return {
275
+ id: '4.3', name: 'Scoped Agent Tokens', passed: false, status: 'fail', level: 4, category: 'Agent-Native', autoDetectable: true,
276
+ message: 'No agent-specific token/scope patterns found',
277
+ recommendation: 'Create agent-specific tokens with capability limits (read-only, write, admin)'
278
+ };
279
+ }
280
+
281
+ export function checkAgentAuditLogs(repoPath: string): RepoCheckResult {
282
+ const patterns = [
283
+ /agent.?audit/i,
284
+ /audit.?log.*agent/i,
285
+ /agent.?identifier/i,
286
+ /agent.?id.*log/i,
287
+ /log.*agent.?id/i,
288
+ /x-agent-id/i,
289
+ /user.?agent.*audit/i,
290
+ ];
291
+
292
+ const allMatches: Array<{file: string, line: number, content: string}> = [];
293
+ for (const pattern of patterns) {
294
+ allMatches.push(...searchInFiles(repoPath, pattern, allFiles));
295
+ }
296
+
297
+ if (allMatches.length > 0) {
298
+ const uniqueFiles = [...new Set(allMatches.map(m => m.file))];
299
+ return {
300
+ id: '4.4', name: 'Agent Audit Logs', passed: true, status: 'pass', level: 4, category: 'Agent-Native', autoDetectable: true,
301
+ message: `Found agent audit logging in ${uniqueFiles.length} file(s)`,
302
+ details: allMatches.slice(0, 3).map(m => `${path.relative(repoPath, m.file)}:${m.line}`).join(', ')
303
+ };
304
+ }
305
+
306
+ return {
307
+ id: '4.4', name: 'Agent Audit Logs', passed: false, status: 'fail', level: 4, category: 'Agent-Native', autoDetectable: true,
308
+ message: 'No agent audit logging found',
309
+ recommendation: 'Log API actions with agent identifiers for traceability'
310
+ };
311
+ }
312
+
313
+ export function checkSandboxEnvironment(repoPath: string): RepoCheckResult {
314
+ const patterns = [
315
+ /sandbox/i,
316
+ /test.?environment/i,
317
+ /staging.?env/i,
318
+ /dry.?run/i,
319
+ /test.?mode/i,
320
+ ];
321
+
322
+ const allMatches: Array<{file: string, line: number, content: string}> = [];
323
+ for (const pattern of patterns) {
324
+ allMatches.push(...searchInFiles(repoPath, pattern, allFiles));
325
+ }
326
+
327
+ // Also check for sandbox config files
328
+ const sandboxFiles = findFiles(repoPath, /sandbox|\.env\.test|\.env\.staging/i);
329
+
330
+ const total = allMatches.length + sandboxFiles.length;
331
+
332
+ if (total > 0) {
333
+ return {
334
+ id: '4.5', name: 'Sandbox Environment', passed: true, status: 'pass', level: 4, category: 'Agent-Native', autoDetectable: true,
335
+ message: `Found sandbox/test environment patterns in ${total} location(s)`,
336
+ details: allMatches.length > 0
337
+ ? allMatches.slice(0, 2).map(m => `${path.relative(repoPath, m.file)}:${m.line}`).join(', ')
338
+ : sandboxFiles.slice(0, 2).map(f => path.relative(repoPath, f)).join(', ')
339
+ };
340
+ }
341
+
342
+ return {
343
+ id: '4.5', name: 'Sandbox Environment', passed: false, status: 'fail', level: 4, category: 'Agent-Native', autoDetectable: true,
344
+ message: 'No sandbox environment found',
345
+ recommendation: 'Provide a sandbox environment for agents to test operations safely'
346
+ };
347
+ }
348
+
349
+ export function checkConsequenceLabels(repoPath: string): RepoCheckResult {
350
+ const patterns = [
351
+ /consequen(ce|tial)/i,
352
+ /irreversible/i,
353
+ /destructive/i,
354
+ /dangerous/i,
355
+ /x-consequence/i,
356
+ /x-reversible/i,
357
+ /side.?effect/i,
358
+ /confirmation.?required/i,
359
+ ];
360
+
361
+ const allMatches: Array<{file: string, line: number, content: string}> = [];
362
+ for (const pattern of patterns) {
363
+ allMatches.push(...searchInFiles(repoPath, pattern, allFiles));
364
+ }
365
+
366
+ if (allMatches.length > 0) {
367
+ const uniqueFiles = [...new Set(allMatches.map(m => m.file))];
368
+ return {
369
+ id: '4.6', name: 'Consequence Labels', passed: true, status: 'pass', level: 4, category: 'Agent-Native', autoDetectable: true,
370
+ message: `Found consequence annotations in ${uniqueFiles.length} file(s)`,
371
+ details: allMatches.slice(0, 3).map(m => `${path.relative(repoPath, m.file)}:${m.line}`).join(', ')
372
+ };
373
+ }
374
+
375
+ return {
376
+ id: '4.6', name: 'Consequence Labels', passed: false, status: 'fail', level: 4, category: 'Agent-Native', autoDetectable: true,
377
+ message: 'No consequence labels found',
378
+ recommendation: 'Mark consequential/irreversible actions in your API docs or schema annotations'
379
+ };
380
+ }
381
+
382
+ export function checkNativeToolSchemas(repoPath: string): RepoCheckResult {
383
+ // Check for tool definition files
384
+ const toolFiles = findFiles(repoPath, /\.(tool|tools)\.(json|ya?ml)$|mcp\.(json|ya?ml)$|tool_?definitions?\.(json|ya?ml)$/i);
385
+
386
+ // Also search for tool schema patterns in code
387
+ const patterns = [
388
+ /tool.?schema/i,
389
+ /tool.?definition/i,
390
+ /function.?calling/i,
391
+ /tool.?manifest/i,
392
+ /mcp.?config/i,
393
+ /openai.?function/i,
394
+ /anthropic.?tool/i,
395
+ ];
396
+
397
+ const allMatches: Array<{file: string, line: number, content: string}> = [];
398
+ for (const pattern of patterns) {
399
+ allMatches.push(...searchInFiles(repoPath, pattern, allFiles));
400
+ }
401
+
402
+ const total = toolFiles.length + allMatches.length;
403
+
404
+ if (toolFiles.length > 0) {
405
+ return {
406
+ id: '4.7', name: 'Native Tool Schemas', passed: true, status: 'pass', level: 4, category: 'Agent-Native', autoDetectable: true,
407
+ message: `Found tool definition files`,
408
+ filePath: toolFiles[0],
409
+ details: toolFiles.map(f => path.relative(repoPath, f)).join(', ')
410
+ };
411
+ } else if (allMatches.length > 0) {
412
+ return {
413
+ id: '4.7', name: 'Native Tool Schemas', passed: false, status: 'partial', level: 4, category: 'Agent-Native', autoDetectable: true,
414
+ message: `Found tool schema references in ${allMatches.length} location(s)`,
415
+ details: allMatches.slice(0, 2).map(m => `${path.relative(repoPath, m.file)}:${m.line}`).join(', '),
416
+ recommendation: 'Extract tool definitions into standalone .tool.json or MCP config files'
417
+ };
418
+ }
419
+
420
+ return {
421
+ id: '4.7', name: 'Native Tool Schemas', passed: false, status: 'fail', level: 4, category: 'Agent-Native', autoDetectable: true,
422
+ message: 'No tool definition files found',
423
+ recommendation: 'Create ready-to-use tool definition files (.tool.json, MCP configs) for agent frameworks'
424
+ };
425
+ }
426
+
427
+ // --- Level 3: Optimization code checks ---
428
+
429
+ export function checkSparseFieldsCode(repoPath: string): RepoCheckResult {
430
+ const patterns = [
431
+ /[?&]fields=/i,
432
+ /[?&]select=/i,
433
+ /\.select\s*\(/i,
434
+ /\.only\s*\(/i,
435
+ /\.values\s*\(/i,
436
+ /\.values_list\s*\(/i,
437
+ /fields\s*[:=]\s*req/i,
438
+ /query\.fields/i,
439
+ /params\.(fields|select)/i,
440
+ /projection\s*[:=]/i,
441
+ ];
442
+
443
+ const allMatches: Array<{file: string, line: number, content: string}> = [];
444
+ for (const pattern of patterns) {
445
+ allMatches.push(...searchInFiles(repoPath, pattern, codeFiles));
446
+ }
447
+
448
+ if (allMatches.length >= 2) {
449
+ const uniqueFiles = [...new Set(allMatches.map(m => m.file))];
450
+ return {
451
+ id: 'repo-3.1', name: 'Sparse Fields', passed: true, status: 'pass', level: 3, category: 'Optimized', autoDetectable: true,
452
+ message: `Found sparse field patterns in ${uniqueFiles.length} file(s)`,
453
+ details: allMatches.slice(0, 3).map(m => `${path.relative(repoPath, m.file)}:${m.line}`).join(', ')
454
+ };
455
+ } else if (allMatches.length === 1) {
456
+ return {
457
+ id: 'repo-3.1', name: 'Sparse Fields', passed: false, status: 'partial', level: 3, category: 'Optimized', autoDetectable: true,
458
+ message: 'Found some sparse field patterns',
459
+ recommendation: 'Add a fields or select query parameter to all list/get endpoints'
460
+ };
461
+ }
462
+
463
+ return {
464
+ id: 'repo-3.1', name: 'Sparse Fields', passed: false, status: 'fail', level: 3, category: 'Optimized', autoDetectable: true,
465
+ message: 'No sparse field support found in code',
466
+ recommendation: 'Add a fields or select parameter to endpoints so agents can request only needed data'
467
+ };
468
+ }
469
+
470
+ export function checkCursorPaginationCode(repoPath: string): RepoCheckResult {
471
+ const patterns = [
472
+ /cursor/i,
473
+ /next_?token/i,
474
+ /next_?page_?token/i,
475
+ /page_?token/i,
476
+ /start_?after/i,
477
+ /\.paginate\s*\(/i,
478
+ /has_?more/i,
479
+ /next_?cursor/i,
480
+ /continuation_?token/i,
481
+ ];
482
+
483
+ const allMatches: Array<{file: string, line: number, content: string}> = [];
484
+ for (const pattern of patterns) {
485
+ allMatches.push(...searchInFiles(repoPath, pattern, codeFiles));
486
+ }
487
+
488
+ if (allMatches.length >= 2) {
489
+ const uniqueFiles = [...new Set(allMatches.map(m => m.file))];
490
+ return {
491
+ id: 'repo-3.2', name: 'Cursor Pagination', passed: true, status: 'pass', level: 3, category: 'Optimized', autoDetectable: true,
492
+ message: `Found cursor pagination patterns in ${uniqueFiles.length} file(s)`,
493
+ details: allMatches.slice(0, 3).map(m => `${path.relative(repoPath, m.file)}:${m.line}`).join(', ')
494
+ };
495
+ } else if (allMatches.length === 1) {
496
+ return {
497
+ id: 'repo-3.2', name: 'Cursor Pagination', passed: false, status: 'partial', level: 3, category: 'Optimized', autoDetectable: true,
498
+ message: 'Found some cursor pagination patterns',
499
+ recommendation: 'Implement cursor-based pagination on all list endpoints with has_more and next_cursor'
500
+ };
501
+ }
502
+
503
+ return {
504
+ id: 'repo-3.2', name: 'Cursor Pagination', passed: false, status: 'fail', level: 3, category: 'Optimized', autoDetectable: true,
505
+ message: 'No cursor pagination found in code',
506
+ recommendation: 'Replace offset pagination with cursor-based pagination for efficient large-set traversal'
507
+ };
508
+ }
509
+
510
+ export function checkSearchFilteringCode(repoPath: string): RepoCheckResult {
511
+ const patterns = [
512
+ /[?&](filter|search|q)=/i,
513
+ /req\.query\.(filter|search|q)\b/i,
514
+ /params\.(filter|search|q)\b/i,
515
+ /query_?params.*filter/i,
516
+ /filter_?by/i,
517
+ /search_?query/i,
518
+ /\.filter\s*\(.*req/i,
519
+ /\.where\s*\(.*req/i,
520
+ ];
521
+
522
+ const allMatches: Array<{file: string, line: number, content: string}> = [];
523
+ for (const pattern of patterns) {
524
+ allMatches.push(...searchInFiles(repoPath, pattern, codeFiles));
525
+ }
526
+
527
+ if (allMatches.length >= 2) {
528
+ const uniqueFiles = [...new Set(allMatches.map(m => m.file))];
529
+ return {
530
+ id: 'repo-3.3', name: 'Search & Filtering', passed: true, status: 'pass', level: 3, category: 'Optimized', autoDetectable: true,
531
+ message: `Found search/filter patterns in ${uniqueFiles.length} file(s)`,
532
+ details: allMatches.slice(0, 3).map(m => `${path.relative(repoPath, m.file)}:${m.line}`).join(', ')
533
+ };
534
+ } else if (allMatches.length === 1) {
535
+ return {
536
+ id: 'repo-3.3', name: 'Search & Filtering', passed: false, status: 'partial', level: 3, category: 'Optimized', autoDetectable: true,
537
+ message: 'Found some search/filter patterns',
538
+ recommendation: 'Add filter and search query parameters to all list endpoints'
539
+ };
540
+ }
541
+
542
+ return {
543
+ id: 'repo-3.3', name: 'Search & Filtering', passed: false, status: 'fail', level: 3, category: 'Optimized', autoDetectable: true,
544
+ message: 'No API search or filtering found in code',
545
+ recommendation: 'Add filter, search, and query parameters so agents can find specific resources without over-fetching'
546
+ };
547
+ }
548
+
549
+ export function checkBulkOpsCode(repoPath: string): RepoCheckResult {
550
+ const patterns = [
551
+ /\/batch/i,
552
+ /\/bulk/i,
553
+ /bulkCreate/i,
554
+ /bulk_create/i,
555
+ /insertMany/i,
556
+ /createMany/i,
557
+ /updateMany/i,
558
+ /deleteMany/i,
559
+ /bulk_update/i,
560
+ /batch_create/i,
561
+ /Promise\.all\s*\(/i,
562
+ ];
563
+
564
+ const allMatches: Array<{file: string, line: number, content: string}> = [];
565
+ for (const pattern of patterns) {
566
+ allMatches.push(...searchInFiles(repoPath, pattern, codeFiles));
567
+ }
568
+
569
+ if (allMatches.length >= 1) {
570
+ const uniqueFiles = [...new Set(allMatches.map(m => m.file))];
571
+ return {
572
+ id: 'repo-3.4', name: 'Bulk Operations', passed: true, status: 'pass', level: 3, category: 'Optimized', autoDetectable: true,
573
+ message: `Found bulk operation patterns in ${uniqueFiles.length} file(s)`,
574
+ details: allMatches.slice(0, 3).map(m => `${path.relative(repoPath, m.file)}:${m.line}`).join(', ')
575
+ };
576
+ }
577
+
578
+ return {
579
+ id: 'repo-3.4', name: 'Bulk Operations', passed: false, status: 'fail', level: 3, category: 'Optimized', autoDetectable: true,
580
+ message: 'No bulk/batch operations found in code',
581
+ recommendation: 'Add batch endpoints for creating, updating, or deleting multiple resources in a single request'
582
+ };
583
+ }
584
+
585
+ export function checkCachingHeadersCode(repoPath: string): RepoCheckResult {
586
+ const patterns = [
587
+ /ETag/i,
588
+ /Cache-Control/i,
589
+ /Last-Modified/i,
590
+ /If-None-Match/i,
591
+ /If-Modified-Since/i,
592
+ /stale-while-revalidate/i,
593
+ /max-age/i,
594
+ /\.cache\s*\(/i,
595
+ /cacheControl/i,
596
+ /setHeader.*cache/i,
597
+ ];
598
+
599
+ const allMatches: Array<{file: string, line: number, content: string}> = [];
600
+ for (const pattern of patterns) {
601
+ allMatches.push(...searchInFiles(repoPath, pattern, codeFiles));
602
+ }
603
+
604
+ if (allMatches.length >= 2) {
605
+ const uniqueFiles = [...new Set(allMatches.map(m => m.file))];
606
+ return {
607
+ id: 'repo-3.6', name: 'Caching Headers', passed: true, status: 'pass', level: 3, category: 'Optimized', autoDetectable: true,
608
+ message: `Found caching header patterns in ${uniqueFiles.length} file(s)`,
609
+ details: allMatches.slice(0, 3).map(m => `${path.relative(repoPath, m.file)}:${m.line}`).join(', ')
610
+ };
611
+ } else if (allMatches.length === 1) {
612
+ return {
613
+ id: 'repo-3.6', name: 'Caching Headers', passed: false, status: 'partial', level: 3, category: 'Optimized', autoDetectable: true,
614
+ message: 'Found some caching patterns',
615
+ recommendation: 'Add ETag, Cache-Control, and Last-Modified headers to API responses'
616
+ };
617
+ }
618
+
619
+ return {
620
+ id: 'repo-3.6', name: 'Caching Headers', passed: false, status: 'fail', level: 3, category: 'Optimized', autoDetectable: true,
621
+ message: 'No caching header patterns found in code',
622
+ recommendation: 'Add Cache-Control, ETag, and Last-Modified headers to reduce token waste from redundant requests'
623
+ };
624
+ }
625
+
626
+ export function checkMcpToolQualityCode(repoPath: string): RepoCheckResult {
627
+ // Look for MCP server definitions
628
+ const mcpPatterns = [
629
+ /McpServer/i,
630
+ /server\.tool\s*\(/i,
631
+ /\.addTool\s*\(/i,
632
+ /mcp.*server/i,
633
+ /tool.*inputSchema/i,
634
+ /\"tools\"\s*:/i,
635
+ ];
636
+
637
+ const mcpMatches: Array<{file: string, line: number, content: string}> = [];
638
+ for (const pattern of mcpPatterns) {
639
+ mcpMatches.push(...searchInFiles(repoPath, pattern, allFiles));
640
+ }
641
+
642
+ // Look for MCP config/manifest files
643
+ const mcpFiles = findFiles(repoPath, /mcp\.(json|ya?ml)$|\.well-known.*mcp/i);
644
+
645
+ // Check for tool descriptions
646
+ const descPatterns = [/description\s*[:=]\s*["'`]/i];
647
+ const descMatches: Array<{file: string, line: number, content: string}> = [];
648
+ for (const pattern of descPatterns) {
649
+ descMatches.push(...searchInFiles(repoPath, pattern, allFiles));
650
+ }
651
+
652
+ const totalMcp = mcpMatches.length + mcpFiles.length;
653
+
654
+ if (totalMcp === 0) {
655
+ return {
656
+ id: 'repo-3.7', name: 'MCP Tool Quality', passed: false, status: 'fail', level: 3, category: 'Optimized', autoDetectable: true,
657
+ message: 'No MCP server or tool definitions found in code',
658
+ recommendation: 'Create an MCP server with well-described tools and input schemas for AI agent integration'
659
+ };
660
+ }
661
+
662
+ // Check quality: do the tools have descriptions?
663
+ const hasDescriptions = descMatches.length >= 2;
664
+ const schemaPatterns = [/inputSchema/i, /parameters.*type.*object/i, /json.?schema/i];
665
+ let schemaCount = 0;
666
+ for (const pattern of schemaPatterns) {
667
+ schemaCount += searchInFiles(repoPath, pattern, allFiles).length;
668
+ }
669
+
670
+ if (hasDescriptions && schemaCount > 0) {
671
+ return {
672
+ id: 'repo-3.7', name: 'MCP Tool Quality', passed: true, status: 'pass', level: 3, category: 'Optimized', autoDetectable: true,
673
+ message: `MCP tools found with descriptions and schemas in ${totalMcp} location(s)`,
674
+ details: mcpMatches.slice(0, 3).map(m => `${path.relative(repoPath, m.file)}:${m.line}`).join(', ')
675
+ };
676
+ }
677
+
678
+ return {
679
+ id: 'repo-3.7', name: 'MCP Tool Quality', passed: false, status: 'partial', level: 3, category: 'Optimized', autoDetectable: true,
680
+ message: `MCP tools found but missing ${!hasDescriptions ? 'descriptions' : 'input schemas'}`,
681
+ recommendation: 'Ensure all MCP tools have detailed descriptions (>10 chars) and inputSchema definitions'
682
+ };
683
+ }
684
+
685
+ // --- Run all repo checks ---
686
+
687
+ export function runRepoChecks(repoPath: string): RepoCheckResult[] {
688
+ if (!fs.existsSync(repoPath)) {
689
+ return [{
690
+ id: 'repo-error', name: 'Repository Path', passed: false, status: 'fail', level: 1, category: 'Error', autoDetectable: true,
691
+ message: `Path does not exist: ${repoPath}`
692
+ }];
693
+ }
694
+
695
+ return [
696
+ // Supplemental checks for Levels 1-2
697
+ checkOpenApiFiles(repoPath),
698
+ checkErrorPatterns(repoPath),
699
+ checkIdempotencyKeys(repoPath),
700
+ // Level 3: Optimization code checks
701
+ checkSparseFieldsCode(repoPath),
702
+ checkCursorPaginationCode(repoPath),
703
+ checkSearchFilteringCode(repoPath),
704
+ checkBulkOpsCode(repoPath),
705
+ checkRateLimitMiddleware(repoPath),
706
+ checkCachingHeadersCode(repoPath),
707
+ checkMcpToolQualityCode(repoPath),
708
+ checkStreamingEndpoints(repoPath),
709
+ // Level 4: Agent-Native checks
710
+ checkIntentEndpoints(repoPath),
711
+ checkAgentSessions(repoPath),
712
+ checkScopedAgentTokens(repoPath),
713
+ checkAgentAuditLogs(repoPath),
714
+ checkSandboxEnvironment(repoPath),
715
+ checkConsequenceLabels(repoPath),
716
+ checkNativeToolSchemas(repoPath),
717
+ ];
718
+ }