@homenshum/convex-mcp-nodebench 0.4.0 → 0.7.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (34):
  1. package/dist/index.js +52 -2
  2. package/dist/tools/actionAuditTools.d.ts +2 -0
  3. package/dist/tools/actionAuditTools.js +180 -0
  4. package/dist/tools/authorizationTools.d.ts +2 -0
  5. package/dist/tools/authorizationTools.js +201 -0
  6. package/dist/tools/critterTools.js +76 -5
  7. package/dist/tools/dataModelingTools.d.ts +2 -0
  8. package/dist/tools/dataModelingTools.js +168 -0
  9. package/dist/tools/deploymentTools.js +42 -2
  10. package/dist/tools/devSetupTools.d.ts +2 -0
  11. package/dist/tools/devSetupTools.js +170 -0
  12. package/dist/tools/embeddingProvider.d.ts +6 -0
  13. package/dist/tools/embeddingProvider.js +3 -0
  14. package/dist/tools/functionTools.js +24 -1
  15. package/dist/tools/httpTools.js +128 -48
  16. package/dist/tools/integrationBridgeTools.js +4 -0
  17. package/dist/tools/methodologyTools.js +8 -1
  18. package/dist/tools/migrationTools.d.ts +2 -0
  19. package/dist/tools/migrationTools.js +133 -0
  20. package/dist/tools/paginationTools.d.ts +2 -0
  21. package/dist/tools/paginationTools.js +125 -0
  22. package/dist/tools/queryEfficiencyTools.d.ts +2 -0
  23. package/dist/tools/queryEfficiencyTools.js +191 -0
  24. package/dist/tools/schemaTools.js +95 -1
  25. package/dist/tools/storageAuditTools.d.ts +2 -0
  26. package/dist/tools/storageAuditTools.js +148 -0
  27. package/dist/tools/toolRegistry.d.ts +9 -2
  28. package/dist/tools/toolRegistry.js +205 -16
  29. package/dist/tools/transactionSafetyTools.d.ts +2 -0
  30. package/dist/tools/transactionSafetyTools.js +166 -0
  31. package/dist/tools/typeSafetyTools.d.ts +2 -0
  32. package/dist/tools/typeSafetyTools.js +146 -0
  33. package/dist/types.d.ts +6 -1
  34. package/package.json +1 -1
@@ -0,0 +1,168 @@
1
+ import { readFileSync, existsSync } from "node:fs";
2
+ import { join, resolve } from "node:path";
3
+ import { getDb, genId } from "../db.js";
4
+ import { getQuickRef } from "./toolRegistry.js";
5
+ // ── Helpers ──────────────────────────────────────────────────────────
6
/**
 * Locate the Convex source directory for a project.
 * Checks `<projectDir>/convex` first, then `<projectDir>/src/convex`.
 *
 * @param {string} projectDir - Project root path.
 * @returns {string|null} The first existing candidate path, or null if none exists.
 */
function findConvexDir(projectDir) {
    const candidates = [
        join(projectDir, "convex"),
        join(projectDir, "src", "convex"),
    ];
    return candidates.find((dir) => existsSync(dir)) ?? null;
}
14
/**
 * Statically analyze a Convex schema.ts for data-modeling smells.
 *
 * Heuristics (line/regex based, not a real TypeScript parse):
 *  - deep nesting of v.object() (> 3 levels) within a table
 *  - v.array() fields (potential document-size growth)
 *  - v.any() usage (loses type safety)
 *  - v.id("x") references to tables not defined in the schema
 *  - tables with a very large field count
 *
 * Fixes vs. previous revision: the running nesting depth is now reset at
 * each table boundary (it previously leaked from one table into the next),
 * comment lines can no longer be mistaken for a table boundary, and the
 * duplicated "previous table" / "last table" issue emission is unified.
 *
 * @param {string} convexDir - Path to the project's convex/ directory.
 * @returns {{issues: Array<object>, stats: {totalTables: number, tablesWithArrays: number, tablesWithDeepNesting: number, danglingIdRefs: number, vAnyCount: number}}}
 */
function auditDataModeling(convexDir) {
    const schemaPath = join(convexDir, "schema.ts");
    const issues = [];
    let totalTables = 0;
    let tablesWithArrays = 0;
    let tablesWithDeepNesting = 0;
    let danglingIdRefs = 0;
    let vAnyCount = 0;
    if (!existsSync(schemaPath)) {
        return {
            issues: [{ severity: "critical", location: "schema.ts", message: "No schema.ts found", fix: "Create convex/schema.ts" }],
            stats: { totalTables: 0, tablesWithArrays: 0, tablesWithDeepNesting: 0, danglingIdRefs: 0, vAnyCount: 0 },
        };
    }
    const content = readFileSync(schemaPath, "utf-8");
    const lines = content.split("\n");
    // Collect every table name up front so v.id() references can be
    // validated during the per-line pass below.
    // NOTE: this full-content regex also matches commented-out defineTable
    // lines; acceptable for a heuristic audit.
    const tableNames = new Set();
    const tableDefPattern = /(\w+)\s*[:=]\s*defineTable\s*\(/g;
    let m;
    while ((m = tableDefPattern.exec(content)) !== null) {
        tableNames.add(m[1]);
        totalTables++;
    }
    // Per-table accumulators for the line scan.
    let currentTable = "";
    let tableStartLine = 0;
    let tableNestDepth = 0;
    let maxNestInTable = 0;
    let tableHasArray = false;
    // Emit accumulated findings for the table currently being scanned.
    // Called at each table boundary and once after the loop, so the last
    // table gets exactly the same treatment as every other table.
    const flushTable = () => {
        if (!currentTable)
            return;
        if (maxNestInTable > 3) {
            tablesWithDeepNesting++;
            issues.push({
                severity: "warning",
                location: `schema.ts:${tableStartLine + 1}`,
                table: currentTable,
                message: `Table "${currentTable}" has ${maxNestInTable} levels of nesting. Deep nesting increases document size and query complexity.`,
                fix: "Consider normalizing deeply nested data into separate tables with Id references",
            });
        }
        if (tableHasArray)
            tablesWithArrays++;
    };
    for (let i = 0; i < lines.length; i++) {
        const line = lines[i];
        const trimmed = line.trim();
        // Skip comment lines entirely — placed before the boundary match so
        // a commented-out defineTable line cannot start a new table.
        if (trimmed.startsWith("//") || trimmed.startsWith("*"))
            continue;
        // Table boundary: flush findings for the previous table and reset
        // all per-table state, including the running nesting depth.
        const tableDef = line.match(/(\w+)\s*[:=]\s*defineTable\s*\(/);
        if (tableDef) {
            flushTable();
            currentTable = tableDef[1];
            tableStartLine = i;
            tableNestDepth = 0;
            maxNestInTable = 0;
            tableHasArray = false;
        }
        // Track nesting depth within the current table: each v.object(
        // opens one level; any ")" may close one (clamped at zero).
        if (currentTable) {
            const opens = (line.match(/v\.object\s*\(/g) || []).length;
            tableNestDepth += opens;
            if (tableNestDepth > maxNestInTable)
                maxNestInTable = tableNestDepth;
            const closes = (line.match(/\)/g) || []).length;
            tableNestDepth = Math.max(0, tableNestDepth - closes);
        }
        // Check: v.array() usage (potential size limit issue)
        if (currentTable && /v\.array\s*\(/.test(line)) {
            tableHasArray = true;
        }
        // Check: v.any() usage
        if (/v\.any\s*\(\s*\)/.test(line)) {
            vAnyCount++;
        }
        // Check: v.id("tableName") references — verify the table exists.
        // "_storage" is a Convex system table, so it is always allowed.
        const idRefPattern = /v\.id\s*\(\s*["'](\w+)["']\s*\)/g;
        let idMatch;
        while ((idMatch = idRefPattern.exec(line)) !== null) {
            const refTable = idMatch[1];
            if (refTable !== "_storage" && !tableNames.has(refTable)) {
                danglingIdRefs++;
                issues.push({
                    severity: "critical",
                    location: `schema.ts:${i + 1}`,
                    table: currentTable,
                    message: `v.id("${refTable}") references table "${refTable}" which is not defined in the schema. This will cause type errors.`,
                    fix: `Either add the "${refTable}" table to the schema or fix the reference`,
                });
            }
        }
    }
    // Flush the final table's findings.
    flushTable();
    // Check: tables with many fields (approaching Convex limits). Only
    // matches the defineTable(v.object({ ... })) form.
    const fieldCountPattern = /defineTable\s*\(\s*v\.object\s*\(\s*\{([\s\S]*?)\}\s*\)/g;
    let fcm;
    while ((fcm = fieldCountPattern.exec(content)) !== null) {
        const fieldCount = (fcm[1].match(/\w+\s*:/g) || []).length;
        if (fieldCount > 50) {
            issues.push({
                severity: "warning",
                location: "schema.ts",
                message: `A table has ${fieldCount} fields. Consider splitting into related tables if this grows further (Convex max: 1024 fields).`,
                fix: "Split large tables into related tables with Id references",
            });
        }
    }
    return {
        issues,
        stats: { totalTables, tablesWithArrays, tablesWithDeepNesting, danglingIdRefs, vAnyCount },
    };
}
136
// ── Tool Definition ─────────────────────────────────────────────────
/**
 * Handler for convex_audit_data_modeling: resolve the project, run the
 * schema audit, persist the results, and return summary + issues.
 */
const runDataModelingAudit = async (args) => {
    const projectDir = resolve(args.projectDir);
    const convexDir = findConvexDir(projectDir);
    if (!convexDir) {
        return { error: "No convex/ directory found" };
    }
    const { issues, stats } = auditDataModeling(convexDir);
    // Persist the audit run so results can be inspected later.
    getDb()
        .prepare("INSERT INTO audit_results (id, project_dir, audit_type, issues_json, issue_count) VALUES (?, ?, ?, ?, ?)")
        .run(genId("audit"), projectDir, "data_modeling", JSON.stringify(issues), issues.length);
    return {
        summary: { ...stats, totalIssues: issues.length },
        issues,
        quickRef: getQuickRef("convex_audit_data_modeling"),
    };
};
export const dataModelingTools = [
    {
        name: "convex_audit_data_modeling",
        description: "Audit Convex schema for data modeling issues: deeply nested objects (flatten into tables), dangling v.id() references to non-existent tables, tables approaching field count limits, v.any() overuse, and array fields that may hit size limits.",
        inputSchema: {
            type: "object",
            properties: {
                projectDir: {
                    type: "string",
                    description: "Absolute path to the project root containing a convex/ directory",
                },
            },
            required: ["projectDir"],
        },
        handler: runDataModelingAudit,
    },
];
168
+ //# sourceMappingURL=dataModelingTools.js.map
@@ -20,7 +20,12 @@ function runPreDeployChecks(projectDir) {
20
20
  if (!convexDir) {
21
21
  checks.push({ name: "convex_dir_exists", passed: false, message: "No convex/ directory found" });
22
22
  blockers.push("No convex/ directory found");
23
- return { passed: false, checks, blockers };
23
+ return {
24
+ passed: false,
25
+ checks,
26
+ blockers: blockers.map((b, i) => ({ priority: i + 1, blocker: b, fixFirst: i === 0 })),
27
+ fixOrder: "Fix #1: Create a convex/ directory. Then re-run convex_pre_deploy_gate.",
28
+ };
24
29
  }
25
30
  checks.push({ name: "convex_dir_exists", passed: true, message: `Found at ${convexDir}` });
26
31
  // Check 2: schema.ts exists
@@ -101,10 +106,19 @@ function runPreDeployChecks(projectDir) {
101
106
  checks.push({ name: "generated_dir_exists", passed: false, message: "_generated/ not found - run 'npx convex dev' first" });
102
107
  blockers.push("Run 'npx convex dev' to initialize the project before deploying");
103
108
  }
109
+ // Add priority ordering to blockers
110
+ const prioritizedBlockers = blockers.map((b, i) => ({
111
+ priority: i + 1,
112
+ blocker: b,
113
+ fixFirst: i === 0,
114
+ }));
104
115
  return {
105
116
  passed: blockers.length === 0,
106
117
  checks,
107
- blockers,
118
+ blockers: prioritizedBlockers,
119
+ fixOrder: blockers.length > 0
120
+ ? `Fix ${blockers.length} blocker(s) in order: ${blockers.map((_, i) => `#${i + 1}`).join(" → ")}. Then re-run convex_pre_deploy_gate.`
121
+ : "All checks passed. Safe to deploy.",
108
122
  };
109
123
  }
110
124
  function checkEnvVars(projectDir) {
@@ -222,8 +236,34 @@ export const deploymentTools = [
222
236
  handler: async (args) => {
223
237
  const projectDir = resolve(args.projectDir);
224
238
  const result = checkEnvVars(projectDir);
239
+ // Group missing vars by service for actionable output
240
+ const serviceGroups = {};
241
+ for (const v of result.missingInEnvFile) {
242
+ const vUp = v.toUpperCase();
243
+ const svc = vUp.includes("OPENAI") ? "OpenAI" :
244
+ vUp.includes("GEMINI") || vUp.includes("GOOGLE") ? "Google" :
245
+ vUp.includes("OPENBB") ? "OpenBB" :
246
+ vUp.includes("TWILIO") ? "Twilio" :
247
+ vUp.includes("LINKEDIN") ? "LinkedIn" :
248
+ vUp.includes("GITHUB") ? "GitHub" :
249
+ vUp.includes("STRIPE") ? "Stripe" :
250
+ vUp.includes("OPENROUTER") ? "OpenRouter" :
251
+ vUp.includes("RESEARCH") ? "Research MCP" :
252
+ vUp.includes("MCP") ? "MCP" :
253
+ vUp.includes("NTFY") ? "Ntfy" :
254
+ vUp.includes("XAI") ? "xAI" :
255
+ vUp.includes("CLERK") ? "Clerk" :
256
+ vUp.includes("CONVEX") ? "Convex" :
257
+ "Other";
258
+ if (!serviceGroups[svc])
259
+ serviceGroups[svc] = [];
260
+ serviceGroups[svc].push(v);
261
+ }
225
262
  return {
226
263
  ...result,
264
+ missingByService: Object.entries(serviceGroups)
265
+ .sort(([, a], [, b]) => b.length - a.length)
266
+ .map(([service, vars]) => ({ service, count: vars.length, vars })),
227
267
  quickRef: getQuickRef("convex_check_env_vars"),
228
268
  };
229
269
  },
@@ -0,0 +1,2 @@
1
+ import type { McpTool } from "../types.js";
2
+ export declare const devSetupTools: McpTool[];
@@ -0,0 +1,170 @@
1
+ import { readFileSync, existsSync } from "node:fs";
2
+ import { join, resolve } from "node:path";
3
+ import { getDb, genId } from "../db.js";
4
+ import { getQuickRef } from "./toolRegistry.js";
5
/**
 * Audit a project's local development setup for Convex.
 *
 * Runs seven filesystem checks; each appends an entry to `checks`, and to
 * `issues` when there is something actionable:
 *  1. .gitignore covers convex/_generated/
 *  2. .env.example exists when .env/.env.local are in use
 *  3. convex.json (if present) is valid JSON with a project configured
 *  4. convex is declared in package.json dependencies
 *  5. tsconfig.json has TypeScript strict mode enabled
 *  6. convex/_generated/ exists (project initialized)
 *  7. convex is actually installed in node_modules
 *
 * @param {string} projectDir - Absolute project root path.
 * @returns {{issues: Array<object>, checks: Array<object>}}
 */
function auditDevSetup(projectDir) {
    const issues = [];
    const checks = [];
    // Check 1: .gitignore includes _generated/
    const gitignorePath = join(projectDir, ".gitignore");
    if (existsSync(gitignorePath)) {
        const gitignore = readFileSync(gitignorePath, "utf-8");
        // A plain "_generated" substring also covers "convex/_generated",
        // so a single includes() test suffices.
        if (gitignore.includes("_generated")) {
            checks.push({ area: "gitignore", status: "pass", detail: "_generated/ is in .gitignore" });
        }
        else {
            checks.push({ area: "gitignore", status: "warn", detail: "_generated/ not in .gitignore — generated files may be committed" });
            issues.push({
                severity: "warning",
                area: "gitignore",
                message: "_generated/ directory is not in .gitignore. These files are auto-generated and should not be committed.",
                fix: "Add `convex/_generated/` to .gitignore",
            });
        }
    }
    else {
        checks.push({ area: "gitignore", status: "fail", detail: "No .gitignore file found" });
        issues.push({
            severity: "warning",
            area: "gitignore",
            message: "No .gitignore file found. Generated and environment files may be committed.",
            fix: "Create a .gitignore with at least: node_modules/, convex/_generated/, .env.local",
        });
    }
    // Check 2: .env.example exists (only flagged when env files are in use)
    const envExamplePath = join(projectDir, ".env.example");
    if (existsSync(envExamplePath)) {
        checks.push({ area: "env_example", status: "pass", detail: ".env.example exists for onboarding" });
    }
    else {
        const envLocalPath = join(projectDir, ".env.local");
        const envPath = join(projectDir, ".env");
        if (existsSync(envLocalPath) || existsSync(envPath)) {
            checks.push({ area: "env_example", status: "warn", detail: ".env files exist but no .env.example for new developers" });
            issues.push({
                severity: "info",
                area: "env_example",
                message: "No .env.example file. New developers won't know which env vars to set.",
                fix: "Create .env.example with placeholder values for all required environment variables",
            });
        }
    }
    // Check 3: convex.json exists and points to valid deployment
    const convexJsonPath = join(projectDir, "convex.json");
    if (existsSync(convexJsonPath)) {
        try {
            const convexJson = JSON.parse(readFileSync(convexJsonPath, "utf-8"));
            if (convexJson.project) {
                checks.push({ area: "convex_json", status: "pass", detail: `convex.json configured for project: ${convexJson.project}` });
            }
            else {
                checks.push({ area: "convex_json", status: "warn", detail: "convex.json exists but no project configured" });
            }
        }
        catch {
            // Invalid JSON blocks the Convex CLI entirely — critical.
            checks.push({ area: "convex_json", status: "fail", detail: "convex.json exists but is invalid JSON" });
            issues.push({
                severity: "critical",
                area: "convex_json",
                message: "convex.json is invalid JSON. Convex CLI won't work.",
                fix: "Fix the JSON syntax in convex.json or delete and run `npx convex dev` to regenerate",
            });
        }
    }
    // Check 4: package.json has convex as dependency (dev or runtime)
    const pkgJsonPath = join(projectDir, "package.json");
    if (existsSync(pkgJsonPath)) {
        try {
            const pkg = JSON.parse(readFileSync(pkgJsonPath, "utf-8"));
            const deps = { ...pkg.dependencies, ...pkg.devDependencies };
            if (deps.convex) {
                checks.push({ area: "convex_dep", status: "pass", detail: `convex@${deps.convex} installed` });
            }
            else {
                checks.push({ area: "convex_dep", status: "fail", detail: "convex not in dependencies" });
                issues.push({
                    severity: "critical",
                    area: "convex_dep",
                    message: "convex package is not in dependencies. Install it first.",
                    fix: "Run: npm install convex",
                });
            }
        }
        catch { /* best-effort: an unparseable package.json is not this audit's concern */ }
    }
    // Check 5: tsconfig.json configured for Convex (regex scan — tsconfig
    // may contain comments, which JSON.parse would reject)
    const tsconfigPath = join(projectDir, "tsconfig.json");
    if (existsSync(tsconfigPath)) {
        const tsconfig = readFileSync(tsconfigPath, "utf-8");
        if (/"strict"\s*:\s*true/.test(tsconfig)) {
            checks.push({ area: "tsconfig", status: "pass", detail: "TypeScript strict mode enabled" });
        }
        else {
            checks.push({ area: "tsconfig", status: "info", detail: "TypeScript strict mode not enabled — recommended for Convex" });
        }
    }
    // Check 6: _generated/ directory exists (project initialized)
    const convexDir = join(projectDir, "convex");
    const generatedDir = join(convexDir, "_generated");
    if (existsSync(generatedDir)) {
        checks.push({ area: "initialization", status: "pass", detail: "_generated/ exists — project is initialized" });
    }
    else if (existsSync(convexDir)) {
        checks.push({ area: "initialization", status: "warn", detail: "_generated/ not found — run `npx convex dev` to initialize" });
        issues.push({
            severity: "warning",
            area: "initialization",
            message: "Convex project not initialized — _generated/ directory is missing.",
            fix: "Run `npx convex dev` to generate types and initialize the project",
        });
    }
    // Check 7: convex actually installed in node_modules
    const nodeModulesPath = join(projectDir, "node_modules", "convex");
    if (existsSync(nodeModulesPath)) {
        checks.push({ area: "node_modules", status: "pass", detail: "convex package installed in node_modules" });
    }
    else if (existsSync(pkgJsonPath)) {
        checks.push({ area: "node_modules", status: "warn", detail: "convex not in node_modules — run npm install" });
    }
    return { issues, checks };
}
132
// ── Tool Definition ─────────────────────────────────────────────────
export const devSetupTools = [
    {
        name: "convex_audit_dev_setup",
        description: "Audit Convex project development setup: .gitignore includes _generated/, .env.example exists, convex.json is valid, convex is in dependencies, TypeScript strict mode, project initialization status.",
        inputSchema: {
            type: "object",
            properties: {
                projectDir: {
                    type: "string",
                    description: "Absolute path to the project root",
                },
            },
            required: ["projectDir"],
        },
        /** Run the dev-setup audit, persist results, and summarize checks. */
        handler: async (args) => {
            const projectDir = resolve(args.projectDir);
            const { issues, checks } = auditDevSetup(projectDir);
            // Persist the audit run for later inspection.
            getDb()
                .prepare("INSERT INTO audit_results (id, project_dir, audit_type, issues_json, issue_count) VALUES (?, ?, ?, ?, ?)")
                .run(genId("audit"), projectDir, "dev_setup", JSON.stringify(issues), issues.length);
            // Tally check outcomes by status in a single pass.
            const tally = { pass: 0, warn: 0, fail: 0 };
            for (const { status } of checks) {
                if (status in tally) {
                    tally[status] += 1;
                }
            }
            return {
                summary: {
                    totalChecks: checks.length,
                    passed: tally.pass,
                    warned: tally.warn,
                    failed: tally.fail,
                    totalIssues: issues.length,
                },
                checks,
                issues,
                quickRef: getQuickRef("convex_audit_dev_setup"),
            };
        },
    },
];
170
+ //# sourceMappingURL=devSetupTools.js.map
@@ -11,19 +11,25 @@ export interface EmbeddingProvider {
11
11
  dimensions: number;
12
12
  embed(texts: string[]): Promise<Float32Array[]>;
13
13
  }
14
+ /** Node type in the bipartite graph: tool nodes vs domain (agent) nodes */
15
+ export type GraphNodeType = "tool" | "domain";
14
16
  interface EmbeddingIndexEntry {
15
17
  name: string;
16
18
  vector: Float32Array;
19
+ /** Node type for Agent-as-a-Graph bipartite scoring */
20
+ nodeType: GraphNodeType;
17
21
  }
18
22
  export declare function getEmbeddingProvider(): Promise<EmbeddingProvider | null>;
19
23
  export declare function initEmbeddingIndex(corpus: Array<{
20
24
  name: string;
21
25
  text: string;
26
+ nodeType?: GraphNodeType;
22
27
  }>): Promise<void>;
23
28
  export declare function embedQuery(text: string): Promise<Float32Array | null>;
24
29
  export declare function embeddingSearch(queryVec: Float32Array, limit?: number): Array<{
25
30
  name: string;
26
31
  similarity: number;
32
+ nodeType: GraphNodeType;
27
33
  }>;
28
34
  export declare function isEmbeddingReady(): boolean;
29
35
  export declare function getProviderName(): string | null;
@@ -120,6 +120,7 @@ async function _doInit(corpus) {
120
120
  _embeddingIndex = corpus.map((c) => ({
121
121
  name: c.name,
122
122
  vector: new Float32Array(cached.entries[c.name]),
123
+ nodeType: c.nodeType ?? "tool",
123
124
  }));
124
125
  return;
125
126
  }
@@ -129,6 +130,7 @@ async function _doInit(corpus) {
129
130
  _embeddingIndex = corpus.map((c, i) => ({
130
131
  name: c.name,
131
132
  vector: vectors[i],
133
+ nodeType: c.nodeType ?? "tool",
132
134
  }));
133
135
  const cacheData = {
134
136
  providerName: provider.name,
@@ -165,6 +167,7 @@ export function embeddingSearch(queryVec, limit = 30) {
165
167
  const scored = _embeddingIndex.map((entry) => ({
166
168
  name: entry.name,
167
169
  similarity: cosineSim(queryVec, entry.vector),
170
+ nodeType: entry.nodeType,
168
171
  }));
169
172
  scored.sort((a, b) => b.similarity - a.similarity);
170
173
  return scored.slice(0, limit);
@@ -287,6 +287,22 @@ export const functionTools = [
287
287
  db.prepare("INSERT INTO audit_results (id, project_dir, audit_type, issues_json, issue_count) VALUES (?, ?, ?, ?, ?)").run(genId("audit"), projectDir, "function_audit", JSON.stringify(issues), issues.length);
288
288
  const critical = issues.filter((i) => i.severity === "critical");
289
289
  const warnings = issues.filter((i) => i.severity === "warning");
290
+ // Aggregate issues by category for cleaner output
291
+ const categories = {};
292
+ for (const issue of issues) {
293
+ const cat = issue.message.includes("missing args") ? "missing_args_validator" :
294
+ issue.message.includes("missing returns") ? "missing_returns_validator" :
295
+ issue.message.includes("missing handler") ? "missing_handler_old_syntax" :
296
+ issue.message.includes("sensitive") ? "sensitive_function_public" :
297
+ issue.message.includes("queries cannot") ? "query_cross_call_violation" :
298
+ issue.message.includes("multiple actions") ? "action_from_action" :
299
+ "other";
300
+ if (!categories[cat])
301
+ categories[cat] = { severity: issue.severity, count: 0, examples: [] };
302
+ categories[cat].count++;
303
+ if (categories[cat].examples.length < 5)
304
+ categories[cat].examples.push(issue);
305
+ }
290
306
  return {
291
307
  summary: {
292
308
  totalFunctions: functions.length,
@@ -296,7 +312,14 @@ export const functionTools = [
296
312
  critical: critical.length,
297
313
  warnings: warnings.length,
298
314
  },
299
- issues,
315
+ issuesByCategory: Object.entries(categories)
316
+ .sort(([, a], [, b]) => (b.severity === "critical" ? 1 : 0) - (a.severity === "critical" ? 1 : 0) || b.count - a.count)
317
+ .map(([cat, data]) => ({
318
+ category: cat,
319
+ severity: data.severity,
320
+ count: data.count,
321
+ examples: data.examples,
322
+ })),
300
323
  quickRef: getQuickRef("convex_audit_functions"),
301
324
  };
302
325
  },