@homenshum/convex-mcp-nodebench 0.4.1 → 0.8.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.js +293 -4
- package/dist/tools/actionAuditTools.d.ts +2 -0
- package/dist/tools/actionAuditTools.js +180 -0
- package/dist/tools/authorizationTools.d.ts +2 -0
- package/dist/tools/authorizationTools.js +201 -0
- package/dist/tools/dataModelingTools.d.ts +2 -0
- package/dist/tools/dataModelingTools.js +168 -0
- package/dist/tools/deploymentTools.js +42 -2
- package/dist/tools/devSetupTools.d.ts +2 -0
- package/dist/tools/devSetupTools.js +170 -0
- package/dist/tools/embeddingProvider.d.ts +6 -0
- package/dist/tools/embeddingProvider.js +3 -0
- package/dist/tools/functionTools.js +24 -1
- package/dist/tools/httpTools.js +128 -48
- package/dist/tools/migrationTools.d.ts +2 -0
- package/dist/tools/migrationTools.js +133 -0
- package/dist/tools/paginationTools.d.ts +2 -0
- package/dist/tools/paginationTools.js +125 -0
- package/dist/tools/qualityGateTools.d.ts +2 -0
- package/dist/tools/qualityGateTools.js +204 -0
- package/dist/tools/queryEfficiencyTools.d.ts +2 -0
- package/dist/tools/queryEfficiencyTools.js +191 -0
- package/dist/tools/reportingTools.d.ts +2 -0
- package/dist/tools/reportingTools.js +240 -0
- package/dist/tools/schedulerTools.d.ts +2 -0
- package/dist/tools/schedulerTools.js +197 -0
- package/dist/tools/schemaTools.js +95 -1
- package/dist/tools/storageAuditTools.d.ts +2 -0
- package/dist/tools/storageAuditTools.js +148 -0
- package/dist/tools/toolRegistry.d.ts +4 -0
- package/dist/tools/toolRegistry.js +274 -11
- package/dist/tools/transactionSafetyTools.d.ts +2 -0
- package/dist/tools/transactionSafetyTools.js +166 -0
- package/dist/tools/typeSafetyTools.d.ts +2 -0
- package/dist/tools/typeSafetyTools.js +146 -0
- package/dist/tools/vectorSearchTools.d.ts +2 -0
- package/dist/tools/vectorSearchTools.js +192 -0
- package/dist/types.d.ts +6 -1
- package/package.json +1 -1
|
@@ -0,0 +1,197 @@
|
|
|
1
|
+
import { readFileSync, existsSync, readdirSync } from "node:fs";
|
|
2
|
+
import { join, resolve } from "node:path";
|
|
3
|
+
import { getDb, genId } from "../db.js";
|
|
4
|
+
import { getQuickRef } from "./toolRegistry.js";
|
|
5
|
+
// ── Helpers ──────────────────────────────────────────────────────────
|
|
6
|
+
// Locate the Convex functions directory for a project. The two conventional
// locations are checked in priority order — "convex/" at the project root,
// then "src/convex/". Returns the first existing path, or null if neither exists.
function findConvexDir(projectDir) {
    for (const parts of [["convex"], ["src", "convex"]]) {
        const candidate = join(projectDir, ...parts);
        if (existsSync(candidate)) {
            return candidate;
        }
    }
    return null;
}
|
|
14
|
+
// Recursively gather every .ts source file under `dir`, skipping the
// node_modules and Convex-generated _generated directories.
// A missing directory yields an empty array.
function collectTsFiles(dir) {
    if (!existsSync(dir)) {
        return [];
    }
    const found = [];
    for (const entry of readdirSync(dir, { withFileTypes: true })) {
        const fullPath = join(dir, entry.name);
        if (entry.isDirectory()) {
            if (entry.name !== "node_modules" && entry.name !== "_generated") {
                found.push(...collectTsFiles(fullPath));
            }
        }
        else if (entry.isFile() && entry.name.endsWith(".ts")) {
            found.push(fullPath);
        }
    }
    return found;
}
|
|
30
|
+
/**
 * Statically audit ctx.scheduler usage (runAfter/runAt) across every .ts
 * file in a Convex directory.
 *
 * Per exported Convex function it checks:
 *   1. Self-scheduling — infinite-loop risk unless guarded.
 *   2. runAfter with a sub-second literal delay — likely missing backoff.
 *   3. Scheduler calls in an action without try/catch — work lost on failure.
 *   4. Fan-out of more than 3 scheduler calls in one function.
 *   5. Scheduler calls inside a query — always invalid (queries are read-only).
 *
 * @param convexDir Absolute path to the convex/ directory.
 * @returns {{issues: Array, stats: Object}} detected issues plus aggregate counters.
 */
function auditSchedulers(convexDir) {
    const files = collectTsFiles(convexDir);
    const issues = [];
    let totalSchedulerCalls = 0;
    let runAfterCalls = 0;
    let runAtCalls = 0;
    let selfSchedulingFunctions = 0;
    const filesWithSchedulers = new Set();
    for (const filePath of files) {
        const content = readFileSync(filePath, "utf-8");
        const relativePath = filePath.replace(convexDir, "").replace(/^[\\/]/, "");
        const lines = content.split("\n");
        // Find all exported Convex functions and their bodies. Queries are
        // included so Check 5 can actually fire — the previous pattern matched
        // only mutations/actions, which left the query check unreachable.
        const funcPattern = /export\s+(?:const\s+(\w+)\s*=|default)\s+(query|internalQuery|mutation|internalMutation|action|internalAction)\s*\(/g;
        let m;
        while ((m = funcPattern.exec(content)) !== null) {
            const funcName = m[1] || "default";
            const funcType = m[2];
            const startLine = content.slice(0, m.index).split("\n").length - 1; // 0-based
            // Extract the function body via brace matching (capped at 100 lines).
            let depth = 0;
            let foundOpen = false;
            let endLine = Math.min(startLine + 100, lines.length);
            for (let j = startLine; j < lines.length; j++) {
                for (const ch of lines[j]) {
                    if (ch === "{") {
                        depth++;
                        foundOpen = true;
                    }
                    if (ch === "}")
                        depth--;
                }
                if (foundOpen && depth <= 0) {
                    endLine = j + 1;
                    break;
                }
            }
            const body = lines.slice(startLine, endLine).join("\n");
            // Count scheduler calls in the body.
            const runAfterMatches = [...body.matchAll(/ctx\.scheduler\.runAfter\s*\(/g)];
            const runAtMatches = [...body.matchAll(/ctx\.scheduler\.runAt\s*\(/g)];
            const allSchedulerCalls = runAfterMatches.length + runAtMatches.length;
            if (allSchedulerCalls === 0)
                continue;
            filesWithSchedulers.add(relativePath);
            totalSchedulerCalls += allSchedulerCalls;
            runAfterCalls += runAfterMatches.length;
            runAtCalls += runAtMatches.length;
            // Check 1: Self-scheduling (infinite loop risk) — the function
            // schedules itself by name through internal.* or api.*.
            const selfRefPattern = new RegExp(`ctx\\.scheduler\\.run(?:After|At)\\s*\\([^,]*,\\s*(?:internal|api)\\.[^,]*\\.${funcName}\\b`);
            if (selfRefPattern.test(body)) {
                selfSchedulingFunctions++;
                // A conditional or early return in the body suggests (but does
                // not prove) a termination guard.
                const hasTermination = /if\s*\(|return\s+(?:null|undefined|void)|\.length\s*(?:===?|<=?)\s*0/.test(body);
                issues.push({
                    severity: hasTermination ? "warning" : "critical",
                    location: `${relativePath}:${startLine + 1}`,
                    functionName: funcName,
                    message: `${funcType} "${funcName}" schedules itself${hasTermination ? " (has conditional guard)" : " without clear termination — infinite loop risk"}.`,
                    fix: hasTermination
                        ? "Verify the termination condition covers all edge cases"
                        : "Add a termination condition (max retries, empty queue check) before self-scheduling",
                });
            }
            // Check 2: Very short delay (< 1 second) — may indicate missing backoff.
            for (const match of runAfterMatches) {
                // Use each match's own offset inside `body` to locate the call.
                // The previous content.indexOf(match[0], m.index) lookup mapped
                // duplicate identical calls to the first occurrence's line.
                const callLine = startLine + body.slice(0, match.index).split("\n").length; // 1-based
                const afterCall = body.slice(match.index, match.index + 100);
                const delayMatch = afterCall.match(/runAfter\s*\(\s*(\d+(?:\.\d+)?)\s*[,)]/);
                if (delayMatch) {
                    const delay = parseFloat(delayMatch[1]);
                    if (delay < 1) {
                        issues.push({
                            severity: "warning",
                            location: `${relativePath}:${callLine}`,
                            functionName: funcName,
                            message: `scheduler.runAfter(${delay}, ...) uses sub-second delay. In retry/loop patterns this can overwhelm the scheduler.`,
                            fix: "Use at least 1-second delay. For retries, implement exponential backoff (e.g., delay * 2^attempt)",
                        });
                    }
                }
            }
            // Check 3: Scheduler in action without try/catch.
            if ((funcType === "action" || funcType === "internalAction") && allSchedulerCalls > 0) {
                if (!/try\s*\{/.test(body)) {
                    issues.push({
                        severity: "info",
                        location: `${relativePath}:${startLine + 1}`,
                        functionName: funcName,
                        message: `${funcType} "${funcName}" uses scheduler without try/catch. If the action fails before scheduling, work may be lost.`,
                        fix: "Wrap scheduler calls in try/catch or move scheduling to a mutation for transactional guarantees",
                    });
                }
            }
            // Check 4: Multiple scheduler calls in same function (fan-out).
            if (allSchedulerCalls > 3) {
                issues.push({
                    severity: "info",
                    location: `${relativePath}:${startLine + 1}`,
                    functionName: funcName,
                    message: `${funcType} "${funcName}" makes ${allSchedulerCalls} scheduler calls. Consider if a single orchestrator action would be cleaner.`,
                    fix: "Group related work into fewer scheduled calls or use a queue-based pattern",
                });
            }
            // Check 5: Scheduling from a query (not possible — queries are read-only).
            if (funcType === "query" || funcType === "internalQuery") {
                issues.push({
                    severity: "critical",
                    location: `${relativePath}:${startLine + 1}`,
                    functionName: funcName,
                    message: `Query "${funcName}" tries to use ctx.scheduler — queries are read-only and cannot schedule functions.`,
                    fix: "Move scheduler calls to a mutation or action",
                });
            }
        }
    }
    return {
        issues,
        stats: {
            totalSchedulerCalls,
            runAfterCalls,
            runAtCalls,
            selfSchedulingFunctions,
            filesWithSchedulers: filesWithSchedulers.size,
        },
    };
}
|
|
160
|
+
// ── Tool Definition ─────────────────────────────────────────────────
|
|
161
|
+
export const schedulerTools = [
    {
        name: "convex_audit_schedulers",
        description: "Audit Convex scheduled function usage (ctx.scheduler.runAfter/runAt): detects infinite self-scheduling loops, sub-second delays without backoff, scheduler calls in queries (impossible), unprotected scheduler calls in actions, and excessive fan-out patterns.",
        inputSchema: {
            type: "object",
            properties: {
                projectDir: {
                    type: "string",
                    description: "Absolute path to the project root containing a convex/ directory",
                },
            },
            required: ["projectDir"],
        },
        // Run the scheduler audit, persist the result for later reporting,
        // and return a capped issue list plus aggregate counters.
        handler: async (args) => {
            const projectDir = resolve(args.projectDir);
            const convexDir = findConvexDir(projectDir);
            if (!convexDir) {
                return { error: "No convex/ directory found" };
            }
            const audit = auditSchedulers(convexDir);
            // Persist the audit so reports can reference it later.
            getDb()
                .prepare("INSERT INTO audit_results (id, project_dir, audit_type, issues_json, issue_count) VALUES (?, ?, ?, ?, ?)")
                .run(genId("audit"), projectDir, "scheduler_audit", JSON.stringify(audit.issues), audit.issues.length);
            const countBySeverity = (sev) => audit.issues.filter((i) => i.severity === sev).length;
            return {
                summary: {
                    ...audit.stats,
                    totalIssues: audit.issues.length,
                    critical: countBySeverity("critical"),
                    warnings: countBySeverity("warning"),
                },
                issues: audit.issues.slice(0, 30),
                quickRef: getQuickRef("convex_audit_schedulers"),
            };
        },
    },
];
|
|
197
|
+
//# sourceMappingURL=schedulerTools.js.map
|
|
@@ -156,6 +156,55 @@ function analyzeSchema(schemaContent, filePath) {
|
|
|
156
156
|
});
|
|
157
157
|
return issues;
|
|
158
158
|
}
|
|
159
|
+
// Scan Convex schema source text for search and vector index declarations.
// Returns { searchIndexes, vectorIndexes } where each entry records the
// owning table, index name, key fields, and the 1-based line it appears on.
// Index options are read from a 10-line lookahead window, since the options
// object usually spans several lines.
function analyzeAdvancedIndexes(schemaContent) {
    const searchIndexes = [];
    const vectorIndexes = [];
    const lines = schemaContent.split("\n");
    // Pull the quoted names out of a `filterFields: [...]` clause, if present.
    const parseFilterFields = (chunk) => {
        const listMatch = chunk.match(/filterFields\s*:\s*\[([^\]]*)\]/);
        if (!listMatch)
            return [];
        return (listMatch[1].match(/["']([^"']+)["']/g) || []).map((f) => f.replace(/["']/g, ""));
    };
    let currentTable = "";
    lines.forEach((line, i) => {
        // Remember which defineTable(...) declaration we are inside of.
        const tableDef = line.match(/(\w+)\s*[:=]\s*defineTable\s*\(/);
        if (tableDef) {
            currentTable = tableDef[1];
        }
        const chunk = lines.slice(i, Math.min(i + 10, lines.length)).join("\n");
        // Detect .searchIndex("name", { ... })
        const searchMatch = line.match(/\.searchIndex\s*\(\s*["']([^"']+)["']/);
        if (searchMatch && currentTable) {
            const searchFieldMatch = chunk.match(/searchField\s*:\s*["']([^"']+)["']/);
            searchIndexes.push({
                table: currentTable,
                name: searchMatch[1],
                searchField: searchFieldMatch?.[1] || "unknown",
                filterFields: parseFilterFields(chunk),
                line: i + 1,
            });
        }
        // Detect .vectorIndex("name", { ... })
        const vectorMatch = line.match(/\.vectorIndex\s*\(\s*["']([^"']+)["']/);
        if (vectorMatch && currentTable) {
            const dimMatch = chunk.match(/dimensions\s*:\s*(\d+)/);
            vectorIndexes.push({
                table: currentTable,
                name: vectorMatch[1],
                dimensions: dimMatch ? parseInt(dimMatch[1]) : undefined,
                filterFields: parseFilterFields(chunk),
                line: i + 1,
            });
        }
    });
    return { searchIndexes, vectorIndexes };
}
|
|
159
208
|
function analyzeValidatorCoverage(convexDir) {
|
|
160
209
|
const files = collectTsFiles(convexDir);
|
|
161
210
|
const result = {
|
|
@@ -283,12 +332,16 @@ export const schemaTools = [
|
|
|
283
332
|
}
|
|
284
333
|
const schemaPath = join(convexDir, "schema.ts");
|
|
285
334
|
const issues = analyzeSchema(schemaContent, schemaPath);
|
|
335
|
+
const advancedIndexes = analyzeAdvancedIndexes(schemaContent);
|
|
286
336
|
// Store audit result
|
|
287
337
|
const db = getDb();
|
|
288
338
|
db.prepare("INSERT INTO audit_results (id, project_dir, audit_type, file_path, issues_json, issue_count) VALUES (?, ?, ?, ?, ?, ?)").run(genId("audit"), projectDir, "schema", schemaPath, JSON.stringify(issues), issues.length);
|
|
289
339
|
const critical = issues.filter((i) => i.severity === "critical");
|
|
290
340
|
const warnings = issues.filter((i) => i.severity === "warning");
|
|
291
341
|
const info = issues.filter((i) => i.severity === "info");
|
|
342
|
+
// Count tables and regular indexes
|
|
343
|
+
const tableCount = (schemaContent.match(/defineTable\s*\(/g) || []).length;
|
|
344
|
+
const regularIndexCount = (schemaContent.match(/\.index\s*\(/g) || []).length;
|
|
292
345
|
return {
|
|
293
346
|
summary: {
|
|
294
347
|
totalIssues: issues.length,
|
|
@@ -296,8 +349,18 @@ export const schemaTools = [
|
|
|
296
349
|
warnings: warnings.length,
|
|
297
350
|
info: info.length,
|
|
298
351
|
schemaFile: schemaPath,
|
|
352
|
+
tables: tableCount,
|
|
353
|
+
regularIndexes: regularIndexCount,
|
|
354
|
+
searchIndexes: advancedIndexes.searchIndexes.length,
|
|
355
|
+
vectorIndexes: advancedIndexes.vectorIndexes.length,
|
|
299
356
|
},
|
|
300
357
|
issues,
|
|
358
|
+
searchIndexes: advancedIndexes.searchIndexes.length > 0
|
|
359
|
+
? advancedIndexes.searchIndexes
|
|
360
|
+
: undefined,
|
|
361
|
+
vectorIndexes: advancedIndexes.vectorIndexes.length > 0
|
|
362
|
+
? advancedIndexes.vectorIndexes
|
|
363
|
+
: undefined,
|
|
301
364
|
quickRef: getQuickRef("convex_audit_schema"),
|
|
302
365
|
};
|
|
303
366
|
},
|
|
@@ -322,9 +385,40 @@ export const schemaTools = [
|
|
|
322
385
|
return { error: "No convex/ directory found" };
|
|
323
386
|
}
|
|
324
387
|
const suggestions = suggestIndexes(convexDir);
|
|
388
|
+
// Add existing index context per table mentioned in suggestions
|
|
389
|
+
const schemaContent = readSchemaFile(convexDir);
|
|
390
|
+
const existingByTable = {};
|
|
391
|
+
if (schemaContent) {
|
|
392
|
+
let currentTable = "";
|
|
393
|
+
for (const line of schemaContent.split("\n")) {
|
|
394
|
+
const tableDef = line.match(/(\w+)\s*[:=]\s*defineTable\s*\(/);
|
|
395
|
+
if (tableDef)
|
|
396
|
+
currentTable = tableDef[1];
|
|
397
|
+
const idxMatch = line.match(/\.index\s*\(\s*["']([^"']+)["']/);
|
|
398
|
+
if (idxMatch && currentTable) {
|
|
399
|
+
if (!existingByTable[currentTable])
|
|
400
|
+
existingByTable[currentTable] = [];
|
|
401
|
+
existingByTable[currentTable].push(idxMatch[1]);
|
|
402
|
+
}
|
|
403
|
+
}
|
|
404
|
+
}
|
|
405
|
+
// Group suggestions by table for cleaner output
|
|
406
|
+
const byTable = {};
|
|
407
|
+
for (const s of suggestions) {
|
|
408
|
+
if (!byTable[s.table])
|
|
409
|
+
byTable[s.table] = [];
|
|
410
|
+
byTable[s.table].push(s);
|
|
411
|
+
}
|
|
325
412
|
return {
|
|
326
413
|
totalSuggestions: suggestions.length,
|
|
327
|
-
|
|
414
|
+
tablesNeedingIndexes: Object.keys(byTable).length,
|
|
415
|
+
suggestionsByTable: Object.entries(byTable)
|
|
416
|
+
.sort(([, a], [, b]) => b.length - a.length)
|
|
417
|
+
.map(([table, sugs]) => ({
|
|
418
|
+
table,
|
|
419
|
+
existingIndexes: existingByTable[table] || [],
|
|
420
|
+
suggestions: sugs,
|
|
421
|
+
})),
|
|
328
422
|
quickRef: getQuickRef("convex_suggest_indexes"),
|
|
329
423
|
};
|
|
330
424
|
},
|
|
@@ -0,0 +1,148 @@
|
|
|
1
|
+
import { readFileSync, existsSync, readdirSync } from "node:fs";
|
|
2
|
+
import { join, resolve } from "node:path";
|
|
3
|
+
import { getDb, genId } from "../db.js";
|
|
4
|
+
import { getQuickRef } from "./toolRegistry.js";
|
|
5
|
+
// ── Helpers ──────────────────────────────────────────────────────────
|
|
6
|
+
// Resolve a project's Convex source directory: prefer "<root>/convex",
// then fall back to "<root>/src/convex". Returns null when neither exists.
function findConvexDir(projectDir) {
    const rootLevel = join(projectDir, "convex");
    if (existsSync(rootLevel))
        return rootLevel;
    const srcLevel = join(projectDir, "src", "convex");
    if (existsSync(srcLevel))
        return srcLevel;
    return null;
}
|
|
14
|
+
// Walk `dir` depth-first and return every .ts file path found, ignoring
// node_modules and the Convex-generated _generated folder.
// A missing directory yields an empty array.
function collectTsFiles(dir) {
    if (!existsSync(dir))
        return [];
    return readdirSync(dir, { withFileTypes: true }).flatMap((entry) => {
        const fullPath = join(dir, entry.name);
        if (entry.isDirectory()) {
            return entry.name === "node_modules" || entry.name === "_generated"
                ? []
                : collectTsFiles(fullPath);
        }
        return entry.isFile() && entry.name.endsWith(".ts") ? [fullPath] : [];
    });
}
|
|
30
|
+
// Statically scan every .ts file under `convexDir` for ctx.storage usage and
// report likely problems:
//   1. storage.get()/getUrl() results used without a null check,
//   2. schema fields that look like storage IDs but are typed v.string(),
//   3. project-wide orphan risk (files stored but never deleted).
// Returns { issues, stats } with aggregate call counters.
function auditStorageUsage(convexDir) {
    const issues = [];
    const stats = {
        filesUsingStorage: 0,
        storageGetCalls: 0,
        storageGetUrlCalls: 0,
        storageStoreCalls: 0,
        storageDeleteCalls: 0,
        missingNullChecks: 0,
    };
    for (const filePath of collectTsFiles(convexDir)) {
        const content = readFileSync(filePath, "utf-8");
        const relativePath = filePath.replace(convexDir, "").replace(/^[\\/]/, "");
        if (!/ctx\.storage\./.test(content))
            continue;
        stats.filesUsingStorage++;
        const lines = content.split("\n");
        for (let i = 0; i < lines.length; i++) {
            const line = lines[i];
            // Tally each kind of storage operation on this line.
            if (/ctx\.storage\.get\s*\(/.test(line))
                stats.storageGetCalls++;
            if (/ctx\.storage\.getUrl\s*\(/.test(line))
                stats.storageGetUrlCalls++;
            if (/ctx\.storage\.store\s*\(/.test(line))
                stats.storageStoreCalls++;
            if (/ctx\.storage\.delete\s*\(/.test(line))
                stats.storageDeleteCalls++;
            // Check 1: `const x = await ctx.storage.get/getUrl(...)` whose
            // result is never null-checked (both return null for missing files).
            const getMatch = line.match(/(?:const|let)\s+(\w+)\s*=\s*await\s+ctx\.storage\.(get|getUrl)\s*\(/);
            if (!getMatch)
                continue;
            const [, varName, method] = getMatch;
            // Scan a 6-line window (including this line) for a guard.
            const lookahead = lines.slice(i, Math.min(i + 6, lines.length)).join("\n");
            const nullCheck = new RegExp(`if\\s*\\(\\s*!${varName}\\b|if\\s*\\(\\s*${varName}\\s*===?\\s*null|${varName}\\s*\\?\\.|${varName}\\s*!\\s*\\.`);
            if (nullCheck.test(lookahead))
                continue;
            stats.missingNullChecks++;
            issues.push({
                severity: "warning",
                location: `${relativePath}:${i + 1}`,
                message: `ctx.storage.${method}() result "${varName}" not null-checked. Returns null if file doesn't exist.`,
                fix: `Add: if (!${varName}) { throw new Error("File not found"); }`,
            });
        }
        // Check 2: fields named like storage references but typed as plain
        // strings instead of v.id("_storage").
        const storageIdFields = content.match(/(\w*(?:storage|file|image|avatar|thumbnail|attachment|media|upload|blob|asset)\w*)\s*:\s*v\.string\s*\(\s*\)/gi) || [];
        for (const fieldMatch of storageIdFields) {
            const fieldName = fieldMatch.split(":")[0].trim();
            issues.push({
                severity: "info",
                location: relativePath,
                message: `Field "${fieldName}" uses v.string() but appears to be a storage ID. Use v.id("_storage") for type safety.`,
                fix: `Change ${fieldName}: v.string() to ${fieldName}: v.id("_storage")`,
            });
        }
    }
    // Check 3: stores with no deletes anywhere → orphaned-file risk.
    if (stats.storageStoreCalls > 0 && stats.storageDeleteCalls === 0) {
        issues.push({
            severity: "info",
            location: "project-wide",
            message: `${stats.storageStoreCalls} storage.store() calls but 0 storage.delete() calls. Stored files may become orphaned when records are deleted.`,
            fix: "Add ctx.storage.delete(storageId) when deleting records that reference stored files",
        });
    }
    return { issues, stats };
}
|
|
113
|
+
// ── Tool Definition ─────────────────────────────────────────────────
|
|
114
|
+
export const storageAuditTools = [
    {
        name: "convex_audit_storage_usage",
        description: "Audit Convex file storage usage: missing null checks on ctx.storage.get()/getUrl(), storage IDs stored as v.string() instead of v.id('_storage'), and orphaned file risk (store calls without corresponding deletes).",
        inputSchema: {
            type: "object",
            properties: {
                projectDir: {
                    type: "string",
                    description: "Absolute path to the project root containing a convex/ directory",
                },
            },
            required: ["projectDir"],
        },
        // Run the storage audit, persist the result for later reporting,
        // and return a capped issue list plus aggregate counters.
        handler: async (args) => {
            const projectDir = resolve(args.projectDir);
            const convexDir = findConvexDir(projectDir);
            if (!convexDir) {
                return { error: "No convex/ directory found" };
            }
            const audit = auditStorageUsage(convexDir);
            // Persist the audit so reports can reference it later.
            getDb()
                .prepare("INSERT INTO audit_results (id, project_dir, audit_type, issues_json, issue_count) VALUES (?, ?, ?, ?, ?)")
                .run(genId("audit"), projectDir, "storage_usage", JSON.stringify(audit.issues), audit.issues.length);
            return {
                summary: {
                    ...audit.stats,
                    totalIssues: audit.issues.length,
                },
                issues: audit.issues.slice(0, 30),
                quickRef: getQuickRef("convex_audit_storage_usage"),
            };
        },
    },
];
|
|
148
|
+
//# sourceMappingURL=storageAuditTools.js.map
|
|
@@ -9,5 +9,9 @@ export declare function findTools(query: string): ScoredToolEntry[];
|
|
|
9
9
|
/**
|
|
10
10
|
* Async wrapper around findTools that fuses BM25 results with embedding RRF
|
|
11
11
|
* when a neural embedding provider is available. Falls back to plain findTools otherwise.
|
|
12
|
+
*
|
|
13
|
+
* Uses Agent-as-a-Graph bipartite RRF (arxiv:2511.18194):
|
|
14
|
+
* - Tool nodes get direct wRRF with α_T = 1.0
|
|
15
|
+
* - Domain nodes get stronger wRRF with α_D = 1.5 (paper-optimal, lifts sibling tools in that category)
|
|
12
16
|
*/
|
|
13
17
|
export declare function findToolsWithEmbedding(query: string): Promise<ScoredToolEntry[]>;
|