preflight-dev 3.1.0 → 3.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +77 -16
- package/dist/cli/init.js +0 -48
- package/dist/cli/init.js.map +1 -1
- package/dist/index.js +7 -0
- package/dist/index.js.map +1 -1
- package/dist/lib/contracts.d.ts +27 -0
- package/dist/lib/contracts.js +309 -0
- package/dist/lib/contracts.js.map +1 -0
- package/dist/lib/patterns.d.ts +38 -0
- package/dist/lib/patterns.js +176 -0
- package/dist/lib/patterns.js.map +1 -0
- package/dist/lib/triage.d.ts +2 -0
- package/dist/lib/triage.js.map +1 -1
- package/dist/profiles.js +4 -0
- package/dist/profiles.js.map +1 -1
- package/dist/tools/check-patterns.d.ts +2 -0
- package/dist/tools/check-patterns.js +33 -0
- package/dist/tools/check-patterns.js.map +1 -0
- package/dist/tools/clarify-intent.js +9 -1
- package/dist/tools/clarify-intent.js.map +1 -1
- package/dist/tools/enrich-agent-task.js +132 -3
- package/dist/tools/enrich-agent-task.js.map +1 -1
- package/dist/tools/estimate-cost.d.ts +2 -0
- package/dist/tools/estimate-cost.js +261 -0
- package/dist/tools/estimate-cost.js.map +1 -0
- package/dist/tools/generate-scorecard.js +466 -14
- package/dist/tools/generate-scorecard.js.map +1 -1
- package/dist/tools/log-correction.js +7 -1
- package/dist/tools/log-correction.js.map +1 -1
- package/dist/tools/onboard-project.js +10 -1
- package/dist/tools/onboard-project.js.map +1 -1
- package/dist/tools/preflight-check.js +16 -0
- package/dist/tools/preflight-check.js.map +1 -1
- package/dist/tools/scope-work.js +6 -0
- package/dist/tools/scope-work.js.map +1 -1
- package/dist/tools/search-contracts.d.ts +2 -0
- package/dist/tools/search-contracts.js +46 -0
- package/dist/tools/search-contracts.js.map +1 -0
- package/dist/tools/session-stats.js +2 -0
- package/dist/tools/session-stats.js.map +1 -1
- package/package.json +1 -1
- package/src/index.ts +7 -0
- package/src/lib/contracts.ts +354 -0
- package/src/lib/patterns.ts +210 -0
- package/src/lib/triage.ts +2 -0
- package/src/profiles.ts +4 -0
- package/src/tools/check-patterns.ts +43 -0
- package/src/tools/clarify-intent.ts +10 -1
- package/src/tools/enrich-agent-task.ts +150 -3
- package/src/tools/estimate-cost.ts +332 -0
- package/src/tools/generate-scorecard.ts +541 -14
- package/src/tools/log-correction.ts +8 -1
- package/src/tools/onboard-project.ts +10 -1
- package/src/tools/preflight-check.ts +19 -0
- package/src/tools/scope-work.ts +7 -0
- package/src/tools/search-contracts.ts +61 -0
- package/src/tools/session-stats.ts +2 -0
|
@@ -0,0 +1,354 @@
|
|
|
1
|
+
// =============================================================================
|
|
2
|
+
// Cross-Service Contract Registry
|
|
3
|
+
// =============================================================================
|
|
4
|
+
// Extracts, stores, and searches API contracts, types, and schemas
|
|
5
|
+
// across projects for fast cross-service context lookups.
|
|
6
|
+
// =============================================================================
|
|
7
|
+
|
|
8
|
+
import { existsSync, readFileSync, writeFileSync, mkdirSync, readdirSync, statSync } from "fs";
|
|
9
|
+
import { join, resolve, relative, basename, extname } from "path";
|
|
10
|
+
import { createHash } from "crypto";
|
|
11
|
+
import { homedir } from "os";
|
|
12
|
+
import { load as yamlLoad } from "js-yaml";
|
|
13
|
+
|
|
14
|
+
// --- Types ---
|
|
15
|
+
|
|
16
|
+
/** A single extracted (or manually declared) cross-service contract. */
export interface Contract {
  /** Identifier: type/enum/model name, or "METHOD path" for routes. */
  name: string;
  /** Which kind of definition this contract came from. */
  kind: "interface" | "type" | "enum" | "route" | "schema" | "event" | "model";
  /** Source file path, relative to the project root. */
  file: string;
  /** Source snippet of the definition (truncated to 500 chars at extraction time). */
  definition: string;
  /** Absolute path of the project this contract belongs to. */
  project: string;
  /** ISO timestamp of when the contract was extracted. */
  extractedAt: string;
}

/** One field of a manually declared contract (.preflight/contracts/*.yml). */
interface ManualContractField {
  name: string;
  /** Free-form type description, e.g. "string" or "User[]". */
  type: string;
  required?: boolean;
}

/** One entry in a manual contract YAML file. */
interface ManualContractEntry {
  name: string;
  // Expected to be one of Contract["kind"]; NOTE(review): not validated on load.
  kind: string;
  fields?: ManualContractField[];
  description?: string;
}
|
|
37
|
+
|
|
38
|
+
// --- Helpers ---
|
|
39
|
+
|
|
40
|
+
const PREFLIGHT_DIR = join(homedir(), ".preflight");
|
|
41
|
+
const PROJECTS_DIR = join(PREFLIGHT_DIR, "projects");
|
|
42
|
+
|
|
43
|
+
function hashProjectDir(projectDir: string): string {
|
|
44
|
+
return createHash("sha256").update(resolve(projectDir)).digest("hex").slice(0, 12);
|
|
45
|
+
}
|
|
46
|
+
|
|
47
|
+
function contractsPath(projectHash: string): string {
|
|
48
|
+
return join(PROJECTS_DIR, projectHash, "contracts.json");
|
|
49
|
+
}
|
|
50
|
+
|
|
51
|
+
// --- File Discovery ---
|
|
52
|
+
|
|
53
|
+
/** Recursively find files matching patterns, skipping node_modules/.git/dist */
|
|
54
|
+
function findFiles(dir: string, patterns: RegExp[], maxDepth = 8, depth = 0): string[] {
|
|
55
|
+
if (depth > maxDepth) return [];
|
|
56
|
+
const results: string[] = [];
|
|
57
|
+
|
|
58
|
+
let entries: string[];
|
|
59
|
+
try {
|
|
60
|
+
entries = readdirSync(dir);
|
|
61
|
+
} catch {
|
|
62
|
+
return results;
|
|
63
|
+
}
|
|
64
|
+
|
|
65
|
+
for (const entry of entries) {
|
|
66
|
+
if (entry === "node_modules" || entry === ".git" || entry === "dist" || entry === ".next" || entry === "build") continue;
|
|
67
|
+
const full = join(dir, entry);
|
|
68
|
+
let st;
|
|
69
|
+
try { st = statSync(full); } catch { continue; }
|
|
70
|
+
|
|
71
|
+
if (st.isDirectory()) {
|
|
72
|
+
results.push(...findFiles(full, patterns, maxDepth, depth + 1));
|
|
73
|
+
} else if (patterns.some(p => p.test(full))) {
|
|
74
|
+
results.push(full);
|
|
75
|
+
}
|
|
76
|
+
}
|
|
77
|
+
return results;
|
|
78
|
+
}
|
|
79
|
+
|
|
80
|
+
// --- Extraction ---
|
|
81
|
+
|
|
82
|
+
// File-name patterns that typically hold TypeScript contract definitions.
// Tested against the full absolute path by findFiles.
const TS_CONTRACT_PATTERNS: RegExp[] = [
  // types.ts, types/*.ts, interfaces.ts, *.d.ts
  /\/types\.ts$/, /\/types\/[^/]+\.ts$/, /\/interfaces\.ts$/, /\.d\.ts$/,
  // API routes
  /\/api\/.*\.ts$/, /\/routes\/.*\.ts$/,
  // Event schemas
  /\/events\/.*\.ts$/, /\/schemas\/.*\.ts$/,
];

// Prisma schema file — requires the conventional prisma/ directory.
const PRISMA_PATTERN = /prisma\/schema\.prisma$/;
// OpenAPI YAML or Swagger JSON spec files.
const OPENAPI_PATTERN = /\/(openapi\.ya?ml|swagger\.json)$/;
|
|
93
|
+
|
|
94
|
+
/** Extract TypeScript interfaces, types, and enums from source */
|
|
95
|
+
function extractTsContracts(content: string, filePath: string, projectDir: string): Contract[] {
|
|
96
|
+
const contracts: Contract[] = [];
|
|
97
|
+
const relPath = relative(projectDir, filePath);
|
|
98
|
+
const now = new Date().toISOString();
|
|
99
|
+
|
|
100
|
+
// Match: export interface Foo { ... }
|
|
101
|
+
const interfaceRe = /export\s+interface\s+(\w+)(?:\s+extends\s+[^{]+)?\s*\{[^}]*(?:\{[^}]*\}[^}]*)*\}/g;
|
|
102
|
+
for (const m of content.matchAll(interfaceRe)) {
|
|
103
|
+
contracts.push({ name: m[1], kind: "interface", file: relPath, definition: m[0].slice(0, 500), project: projectDir, extractedAt: now });
|
|
104
|
+
}
|
|
105
|
+
|
|
106
|
+
// Match: export type Foo = ...
|
|
107
|
+
const typeRe = /export\s+type\s+(\w+)\s*(?:<[^>]*>)?\s*=[^;]+;/g;
|
|
108
|
+
for (const m of content.matchAll(typeRe)) {
|
|
109
|
+
contracts.push({ name: m[1], kind: "type", file: relPath, definition: m[0].slice(0, 500), project: projectDir, extractedAt: now });
|
|
110
|
+
}
|
|
111
|
+
|
|
112
|
+
// Match: export enum Foo { ... }
|
|
113
|
+
const enumRe = /export\s+enum\s+(\w+)\s*\{[^}]*\}/g;
|
|
114
|
+
for (const m of content.matchAll(enumRe)) {
|
|
115
|
+
contracts.push({ name: m[1], kind: "enum", file: relPath, definition: m[0].slice(0, 500), project: projectDir, extractedAt: now });
|
|
116
|
+
}
|
|
117
|
+
|
|
118
|
+
// Detect route definitions (Next.js/Express patterns)
|
|
119
|
+
const isRouteFile = /\/(api|routes)\//.test(filePath);
|
|
120
|
+
if (isRouteFile) {
|
|
121
|
+
// export async function GET/POST/PUT/DELETE/PATCH
|
|
122
|
+
const routeRe = /export\s+(?:async\s+)?function\s+(GET|POST|PUT|DELETE|PATCH|handler)\b[^{]*\{/g;
|
|
123
|
+
for (const m of content.matchAll(routeRe)) {
|
|
124
|
+
const routeName = `${m[1]} ${relPath.replace(/\.(ts|js)$/, "")}`;
|
|
125
|
+
const defStart = m.index!;
|
|
126
|
+
contracts.push({ name: routeName, kind: "route", file: relPath, definition: content.slice(defStart, defStart + 500), project: projectDir, extractedAt: now });
|
|
127
|
+
}
|
|
128
|
+
// router.get/post/put/delete
|
|
129
|
+
const expressRe = /router\.(get|post|put|delete|patch)\s*\(\s*['"`]([^'"`]+)['"`]/g;
|
|
130
|
+
for (const m of content.matchAll(expressRe)) {
|
|
131
|
+
contracts.push({ name: `${m[1].toUpperCase()} ${m[2]}`, kind: "route", file: relPath, definition: m[0].slice(0, 500), project: projectDir, extractedAt: now });
|
|
132
|
+
}
|
|
133
|
+
}
|
|
134
|
+
|
|
135
|
+
return contracts;
|
|
136
|
+
}
|
|
137
|
+
|
|
138
|
+
/** Extract Prisma models */
|
|
139
|
+
function extractPrismaContracts(content: string, filePath: string, projectDir: string): Contract[] {
|
|
140
|
+
const contracts: Contract[] = [];
|
|
141
|
+
const relPath = relative(projectDir, filePath);
|
|
142
|
+
const now = new Date().toISOString();
|
|
143
|
+
|
|
144
|
+
const modelRe = /model\s+(\w+)\s*\{[^}]*\}/g;
|
|
145
|
+
for (const m of content.matchAll(modelRe)) {
|
|
146
|
+
contracts.push({ name: m[1], kind: "model", file: relPath, definition: m[0].slice(0, 500), project: projectDir, extractedAt: now });
|
|
147
|
+
}
|
|
148
|
+
|
|
149
|
+
const enumRe = /enum\s+(\w+)\s*\{[^}]*\}/g;
|
|
150
|
+
for (const m of content.matchAll(enumRe)) {
|
|
151
|
+
contracts.push({ name: m[1], kind: "enum", file: relPath, definition: m[0].slice(0, 500), project: projectDir, extractedAt: now });
|
|
152
|
+
}
|
|
153
|
+
|
|
154
|
+
return contracts;
|
|
155
|
+
}
|
|
156
|
+
|
|
157
|
+
/** Extract from OpenAPI/Swagger specs */
|
|
158
|
+
function extractOpenApiContracts(content: string, filePath: string, projectDir: string): Contract[] {
|
|
159
|
+
const contracts: Contract[] = [];
|
|
160
|
+
const relPath = relative(projectDir, filePath);
|
|
161
|
+
const now = new Date().toISOString();
|
|
162
|
+
|
|
163
|
+
try {
|
|
164
|
+
const spec = filePath.endsWith(".json") ? JSON.parse(content) : yamlLoad(content) as any;
|
|
165
|
+
if (spec?.paths) {
|
|
166
|
+
for (const [path, methods] of Object.entries(spec.paths as Record<string, any>)) {
|
|
167
|
+
for (const method of Object.keys(methods)) {
|
|
168
|
+
if (["get", "post", "put", "delete", "patch"].includes(method)) {
|
|
169
|
+
const op = methods[method];
|
|
170
|
+
const name = `${method.toUpperCase()} ${path}`;
|
|
171
|
+
const def = JSON.stringify({ summary: op.summary, parameters: op.parameters, requestBody: op.requestBody }, null, 2);
|
|
172
|
+
contracts.push({ name, kind: "route", file: relPath, definition: def.slice(0, 500), project: projectDir, extractedAt: now });
|
|
173
|
+
}
|
|
174
|
+
}
|
|
175
|
+
}
|
|
176
|
+
}
|
|
177
|
+
if (spec?.components?.schemas) {
|
|
178
|
+
for (const [name, schema] of Object.entries(spec.components.schemas)) {
|
|
179
|
+
contracts.push({ name, kind: "schema", file: relPath, definition: JSON.stringify(schema, null, 2).slice(0, 500), project: projectDir, extractedAt: now });
|
|
180
|
+
}
|
|
181
|
+
}
|
|
182
|
+
} catch {
|
|
183
|
+
// Invalid spec, skip
|
|
184
|
+
}
|
|
185
|
+
|
|
186
|
+
return contracts;
|
|
187
|
+
}
|
|
188
|
+
|
|
189
|
+
// --- Public API ---
|
|
190
|
+
|
|
191
|
+
/** Scan a project directory and extract all contracts */
|
|
192
|
+
export function extractContracts(projectDir: string): Contract[] {
|
|
193
|
+
const absDir = resolve(projectDir);
|
|
194
|
+
const contracts: Contract[] = [];
|
|
195
|
+
|
|
196
|
+
// TypeScript files
|
|
197
|
+
const tsFiles = findFiles(absDir, TS_CONTRACT_PATTERNS);
|
|
198
|
+
for (const file of tsFiles) {
|
|
199
|
+
try {
|
|
200
|
+
const content = readFileSync(file, "utf-8");
|
|
201
|
+
contracts.push(...extractTsContracts(content, file, absDir));
|
|
202
|
+
} catch { /* skip unreadable */ }
|
|
203
|
+
}
|
|
204
|
+
|
|
205
|
+
// Prisma
|
|
206
|
+
const prismaFiles = findFiles(absDir, [PRISMA_PATTERN], 3);
|
|
207
|
+
for (const file of prismaFiles) {
|
|
208
|
+
try {
|
|
209
|
+
const content = readFileSync(file, "utf-8");
|
|
210
|
+
contracts.push(...extractPrismaContracts(content, file, absDir));
|
|
211
|
+
} catch { /* skip */ }
|
|
212
|
+
}
|
|
213
|
+
|
|
214
|
+
// OpenAPI
|
|
215
|
+
const openApiFiles = findFiles(absDir, [OPENAPI_PATTERN], 5);
|
|
216
|
+
for (const file of openApiFiles) {
|
|
217
|
+
try {
|
|
218
|
+
const content = readFileSync(file, "utf-8");
|
|
219
|
+
contracts.push(...extractOpenApiContracts(content, file, absDir));
|
|
220
|
+
} catch { /* skip */ }
|
|
221
|
+
}
|
|
222
|
+
|
|
223
|
+
return contracts;
|
|
224
|
+
}
|
|
225
|
+
|
|
226
|
+
/** Load manual contract definitions from .preflight/contracts/ */
|
|
227
|
+
export function loadManualContracts(projectDir: string): Contract[] {
|
|
228
|
+
const contractsDir = join(projectDir, ".preflight", "contracts");
|
|
229
|
+
if (!existsSync(contractsDir)) return [];
|
|
230
|
+
|
|
231
|
+
const contracts: Contract[] = [];
|
|
232
|
+
const now = new Date().toISOString();
|
|
233
|
+
|
|
234
|
+
let entries: string[];
|
|
235
|
+
try { entries = readdirSync(contractsDir); } catch { return []; }
|
|
236
|
+
|
|
237
|
+
for (const entry of entries) {
|
|
238
|
+
if (![".yml", ".yaml"].includes(extname(entry))) continue;
|
|
239
|
+
const filePath = join(contractsDir, entry);
|
|
240
|
+
try {
|
|
241
|
+
const content = readFileSync(filePath, "utf-8");
|
|
242
|
+
const items = yamlLoad(content) as ManualContractEntry[];
|
|
243
|
+
if (!Array.isArray(items)) continue;
|
|
244
|
+
|
|
245
|
+
for (const item of items) {
|
|
246
|
+
if (!item.name || !item.kind) continue;
|
|
247
|
+
const def = item.fields
|
|
248
|
+
? `${item.description || ""}\nFields: ${item.fields.map(f => `${f.name}: ${f.type}${f.required ? " (required)" : ""}`).join(", ")}`
|
|
249
|
+
: item.description || "";
|
|
250
|
+
contracts.push({
|
|
251
|
+
name: item.name,
|
|
252
|
+
kind: item.kind as Contract["kind"],
|
|
253
|
+
file: `.preflight/contracts/${entry}`,
|
|
254
|
+
definition: def.trim().slice(0, 500),
|
|
255
|
+
project: resolve(projectDir),
|
|
256
|
+
extractedAt: now,
|
|
257
|
+
});
|
|
258
|
+
}
|
|
259
|
+
} catch { /* skip invalid */ }
|
|
260
|
+
}
|
|
261
|
+
|
|
262
|
+
return contracts;
|
|
263
|
+
}
|
|
264
|
+
|
|
265
|
+
/** Load stored contracts for a project */
|
|
266
|
+
export function loadContracts(projectHash: string): Contract[] {
|
|
267
|
+
const p = contractsPath(projectHash);
|
|
268
|
+
if (!existsSync(p)) return [];
|
|
269
|
+
try {
|
|
270
|
+
return JSON.parse(readFileSync(p, "utf-8"));
|
|
271
|
+
} catch {
|
|
272
|
+
return [];
|
|
273
|
+
}
|
|
274
|
+
}
|
|
275
|
+
|
|
276
|
+
/** Save contracts to storage */
|
|
277
|
+
export function saveContracts(projectHash: string, contracts: Contract[]): void {
|
|
278
|
+
const dir = join(PROJECTS_DIR, projectHash);
|
|
279
|
+
mkdirSync(dir, { recursive: true });
|
|
280
|
+
writeFileSync(contractsPath(projectHash), JSON.stringify(contracts, null, 2));
|
|
281
|
+
}
|
|
282
|
+
|
|
283
|
+
/** Search contracts by query string (simple relevance scoring) */
|
|
284
|
+
export function searchContracts(query: string, contracts: Contract[]): Contract[] {
|
|
285
|
+
const terms = query.toLowerCase().split(/\s+/).filter(t => t.length > 1);
|
|
286
|
+
if (terms.length === 0) return contracts;
|
|
287
|
+
|
|
288
|
+
const scored = contracts.map(c => {
|
|
289
|
+
const nameL = c.name.toLowerCase();
|
|
290
|
+
const defL = c.definition.toLowerCase();
|
|
291
|
+
const fileL = c.file.toLowerCase();
|
|
292
|
+
let score = 0;
|
|
293
|
+
|
|
294
|
+
for (const term of terms) {
|
|
295
|
+
// Exact name match is highest
|
|
296
|
+
if (nameL === term) score += 10;
|
|
297
|
+
// Name contains term
|
|
298
|
+
else if (nameL.includes(term)) score += 5;
|
|
299
|
+
// File contains term
|
|
300
|
+
if (fileL.includes(term)) score += 2;
|
|
301
|
+
// Definition contains term
|
|
302
|
+
if (defL.includes(term)) score += 1;
|
|
303
|
+
}
|
|
304
|
+
|
|
305
|
+
return { contract: c, score };
|
|
306
|
+
});
|
|
307
|
+
|
|
308
|
+
return scored
|
|
309
|
+
.filter(s => s.score > 0)
|
|
310
|
+
.sort((a, b) => b.score - a.score)
|
|
311
|
+
.map(s => s.contract);
|
|
312
|
+
}
|
|
313
|
+
|
|
314
|
+
/** Extract and save contracts for a project, merging with manual definitions */
|
|
315
|
+
export function extractAndSaveContracts(projectDir: string): { count: number; hash: string } {
|
|
316
|
+
const absDir = resolve(projectDir);
|
|
317
|
+
const hash = hashProjectDir(absDir);
|
|
318
|
+
|
|
319
|
+
const autoContracts = extractContracts(absDir);
|
|
320
|
+
const manualContracts = loadManualContracts(absDir);
|
|
321
|
+
|
|
322
|
+
// Manual contracts take precedence (same name = manual wins)
|
|
323
|
+
const manualNames = new Set(manualContracts.map(c => c.name));
|
|
324
|
+
const merged = [
|
|
325
|
+
...manualContracts,
|
|
326
|
+
...autoContracts.filter(c => !manualNames.has(c.name)),
|
|
327
|
+
];
|
|
328
|
+
|
|
329
|
+
saveContracts(hash, merged);
|
|
330
|
+
return { count: merged.length, hash };
|
|
331
|
+
}
|
|
332
|
+
|
|
333
|
+
/** Load all contracts for a list of project directories */
|
|
334
|
+
export function loadAllContracts(projectDirs: string[]): Contract[] {
|
|
335
|
+
const all: Contract[] = [];
|
|
336
|
+
for (const dir of projectDirs) {
|
|
337
|
+
const hash = hashProjectDir(resolve(dir));
|
|
338
|
+
all.push(...loadContracts(hash));
|
|
339
|
+
}
|
|
340
|
+
return all;
|
|
341
|
+
}
|
|
342
|
+
|
|
343
|
+
/** Format contracts for display in tool output */
|
|
344
|
+
export function formatContracts(contracts: Contract[], limit = 10): string {
|
|
345
|
+
if (contracts.length === 0) return "";
|
|
346
|
+
|
|
347
|
+
const lines = contracts.slice(0, limit).map(c => {
|
|
348
|
+
const proj = basename(c.project);
|
|
349
|
+
const defPreview = c.definition.split("\n")[0].slice(0, 120);
|
|
350
|
+
return `From ${proj}: ${c.kind} ${c.name} — ${defPreview} (${c.file})`;
|
|
351
|
+
});
|
|
352
|
+
|
|
353
|
+
return lines.join("\n");
|
|
354
|
+
}
|
|
@@ -0,0 +1,210 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Correction pattern learning system.
|
|
3
|
+
* Analyzes correction logs and extracts recurring patterns so preflight
|
|
4
|
+
* can warn about known pitfalls before they happen again.
|
|
5
|
+
*/
|
|
6
|
+
|
|
7
|
+
import { readLog, saveState, loadState } from "./state.js";
|
|
8
|
+
|
|
9
|
+
// ── Types ──────────────────────────────────────────────────────────────────
|
|
10
|
+
|
|
11
|
+
/** A recurring correction pattern mined from the corrections log. */
export interface CorrectionPattern {
  /** Identifier assigned at extraction time ("p1", "p2", …). */
  id: string;
  /** Human-readable summary built from the group's top keywords. */
  pattern: string;
  /** Most frequent keywords across the grouped corrections (at most 8). */
  keywords: string[];
  /** How many logged corrections fell into this group. */
  frequency: number;
  /** ISO timestamp of the most recent correction in the group. */
  lastSeen: string;
  /** Text of the most detailed correction in the group (truncated to 300 chars). */
  context: string;
  /** Up to 3 example user messages that triggered corrections. */
  examples: string[];
}
|
|
20
|
+
|
|
21
|
+
// ── Helpers ────────────────────────────────────────────────────────────────
|
|
22
|
+
|
|
23
|
+
/** Extract meaningful keywords (3+ chars, lowercased, deduplicated). */
|
|
24
|
+
function extractKeywords(text: string): string[] {
|
|
25
|
+
const stopWords = new Set([
|
|
26
|
+
"the", "and", "that", "this", "with", "for", "not", "but", "was", "are",
|
|
27
|
+
"you", "your", "have", "has", "had", "been", "were", "will", "would",
|
|
28
|
+
"could", "should", "can", "did", "does", "don", "isn", "isn't", "don't",
|
|
29
|
+
"use", "used", "using", "from", "into", "about", "just", "wrong", "again",
|
|
30
|
+
"said", "instead", "meant", "want", "wanted", "need", "like",
|
|
31
|
+
]);
|
|
32
|
+
const words = text
|
|
33
|
+
.replace(/[^a-zA-Z0-9_\-/.]/g, " ")
|
|
34
|
+
.split(/\s+/)
|
|
35
|
+
.filter((w) => w.length >= 3)
|
|
36
|
+
.map((w) => w.toLowerCase())
|
|
37
|
+
.filter((w) => !stopWords.has(w));
|
|
38
|
+
return [...new Set(words)];
|
|
39
|
+
}
|
|
40
|
+
|
|
41
|
+
/** Compute overlap ratio between two keyword sets. */
|
|
42
|
+
function keywordOverlap(a: string[], b: string[]): number {
|
|
43
|
+
if (a.length === 0 || b.length === 0) return 0;
|
|
44
|
+
const setB = new Set(b);
|
|
45
|
+
const shared = a.filter((w) => setB.has(w)).length;
|
|
46
|
+
return shared / Math.min(a.length, b.length);
|
|
47
|
+
}
|
|
48
|
+
|
|
49
|
+
// ── Core Functions ─────────────────────────────────────────────────────────
|
|
50
|
+
|
|
51
|
+
/**
|
|
52
|
+
* Read correction log and extract recurring patterns.
|
|
53
|
+
* Groups corrections by keyword similarity. If 2+ corrections share
|
|
54
|
+
* enough keywords, creates a pattern.
|
|
55
|
+
*/
|
|
56
|
+
export function extractPatterns(): CorrectionPattern[] {
|
|
57
|
+
const corrections = readLog("corrections.jsonl");
|
|
58
|
+
if (corrections.length === 0) return [];
|
|
59
|
+
|
|
60
|
+
// Build keyword index per correction
|
|
61
|
+
const entries = corrections.map((c) => ({
|
|
62
|
+
text: `${c.user_said || ""} ${c.wrong_action || ""} ${c.root_cause || ""}`,
|
|
63
|
+
keywords: extractKeywords(
|
|
64
|
+
`${c.user_said || ""} ${c.wrong_action || ""} ${c.root_cause || ""}`,
|
|
65
|
+
),
|
|
66
|
+
timestamp: c.timestamp as string,
|
|
67
|
+
userSaid: (c.user_said || "") as string,
|
|
68
|
+
}));
|
|
69
|
+
|
|
70
|
+
// Group by similarity (greedy clustering)
|
|
71
|
+
const used = new Set<number>();
|
|
72
|
+
const groups: number[][] = [];
|
|
73
|
+
|
|
74
|
+
for (let i = 0; i < entries.length; i++) {
|
|
75
|
+
if (used.has(i)) continue;
|
|
76
|
+
const group = [i];
|
|
77
|
+
used.add(i);
|
|
78
|
+
for (let j = i + 1; j < entries.length; j++) {
|
|
79
|
+
if (used.has(j)) continue;
|
|
80
|
+
if (keywordOverlap(entries[i].keywords, entries[j].keywords) >= 0.3) {
|
|
81
|
+
group.push(j);
|
|
82
|
+
used.add(j);
|
|
83
|
+
}
|
|
84
|
+
}
|
|
85
|
+
if (group.length >= 2) {
|
|
86
|
+
groups.push(group);
|
|
87
|
+
}
|
|
88
|
+
}
|
|
89
|
+
|
|
90
|
+
// Convert groups to patterns
|
|
91
|
+
const patterns: CorrectionPattern[] = groups.map((group, idx) => {
|
|
92
|
+
const groupEntries = group.map((i) => entries[i]);
|
|
93
|
+
|
|
94
|
+
// Merge keywords, ranked by frequency
|
|
95
|
+
const kwFreq: Record<string, number> = {};
|
|
96
|
+
for (const e of groupEntries) {
|
|
97
|
+
for (const kw of e.keywords) {
|
|
98
|
+
kwFreq[kw] = (kwFreq[kw] || 0) + 1;
|
|
99
|
+
}
|
|
100
|
+
}
|
|
101
|
+
const topKeywords = Object.entries(kwFreq)
|
|
102
|
+
.sort((a, b) => b[1] - a[1])
|
|
103
|
+
.slice(0, 8)
|
|
104
|
+
.map(([k]) => k);
|
|
105
|
+
|
|
106
|
+
// Most recent timestamp
|
|
107
|
+
const lastSeen = groupEntries
|
|
108
|
+
.map((e) => e.timestamp)
|
|
109
|
+
.filter(Boolean)
|
|
110
|
+
.sort()
|
|
111
|
+
.pop() || new Date().toISOString();
|
|
112
|
+
|
|
113
|
+
// Examples (up to 3 user_said messages)
|
|
114
|
+
const examples = groupEntries
|
|
115
|
+
.map((e) => e.userSaid)
|
|
116
|
+
.filter((s) => s.length > 0)
|
|
117
|
+
.slice(0, 3);
|
|
118
|
+
|
|
119
|
+
// Build human-readable pattern from top keywords
|
|
120
|
+
const patternDesc = `Recurring correction: ${topKeywords.slice(0, 4).join(", ")}`;
|
|
121
|
+
|
|
122
|
+
// Context from the most detailed correction
|
|
123
|
+
const longestEntry = groupEntries.sort((a, b) => b.text.length - a.text.length)[0];
|
|
124
|
+
const context = longestEntry.text.trim().slice(0, 300);
|
|
125
|
+
|
|
126
|
+
return {
|
|
127
|
+
id: `p${idx + 1}`,
|
|
128
|
+
pattern: patternDesc,
|
|
129
|
+
keywords: topKeywords,
|
|
130
|
+
frequency: group.length,
|
|
131
|
+
lastSeen,
|
|
132
|
+
context,
|
|
133
|
+
examples,
|
|
134
|
+
};
|
|
135
|
+
});
|
|
136
|
+
|
|
137
|
+
return patterns.sort((a, b) => b.frequency - a.frequency);
|
|
138
|
+
}
|
|
139
|
+
|
|
140
|
+
/**
|
|
141
|
+
* Check if a prompt matches any known patterns.
|
|
142
|
+
* Returns patterns whose keywords overlap with the prompt.
|
|
143
|
+
*/
|
|
144
|
+
export function matchPatterns(
|
|
145
|
+
prompt: string,
|
|
146
|
+
patterns: CorrectionPattern[],
|
|
147
|
+
): CorrectionPattern[] {
|
|
148
|
+
if (patterns.length === 0) return [];
|
|
149
|
+
const promptKeywords = extractKeywords(prompt);
|
|
150
|
+
const promptLower = prompt.toLowerCase();
|
|
151
|
+
|
|
152
|
+
return patterns.filter((p) => {
|
|
153
|
+
// Direct keyword match: at least 2 keywords present in prompt
|
|
154
|
+
const hits = p.keywords.filter(
|
|
155
|
+
(kw) => promptLower.includes(kw.toLowerCase()),
|
|
156
|
+
);
|
|
157
|
+
return hits.length >= 2;
|
|
158
|
+
});
|
|
159
|
+
}
|
|
160
|
+
|
|
161
|
+
/**
 * Save patterns to state.
 * Stored under the "patterns" key alongside an `updated` ISO timestamp.
 */
export function savePatterns(patterns: CorrectionPattern[]): void {
  saveState("patterns", { patterns, updated: new Date().toISOString() });
}
|
|
165
|
+
|
|
166
|
+
/**
 * Load patterns from state.
 * Returns an empty array when no patterns have been saved yet.
 */
export function loadPatterns(): CorrectionPattern[] {
  const state = loadState("patterns");
  return (state.patterns as CorrectionPattern[]) || [];
}
|
|
171
|
+
|
|
172
|
+
/**
 * Re-extract patterns from corrections log and save them.
 * Returns the updated patterns.
 */
export function refreshPatterns(): CorrectionPattern[] {
  const patterns = extractPatterns();
  savePatterns(patterns);
  return patterns;
}
|
|
181
|
+
|
|
182
|
+
/**
|
|
183
|
+
* Format matched patterns for display.
|
|
184
|
+
*/
|
|
185
|
+
export function formatPatternMatches(matches: CorrectionPattern[]): string {
|
|
186
|
+
if (matches.length === 0) return "";
|
|
187
|
+
|
|
188
|
+
const lines = ["⚠️ Known patterns matched:", ""];
|
|
189
|
+
for (let i = 0; i < matches.length; i++) {
|
|
190
|
+
const p = matches[i];
|
|
191
|
+
const ago = formatTimeAgo(p.lastSeen);
|
|
192
|
+
lines.push(`${i + 1}. "${p.pattern}" (corrected ${p.frequency}x)`);
|
|
193
|
+
lines.push(` Context: ${p.context.slice(0, 150)}`);
|
|
194
|
+
lines.push(` Last triggered: ${ago}`);
|
|
195
|
+
lines.push("");
|
|
196
|
+
}
|
|
197
|
+
return lines.join("\n");
|
|
198
|
+
}
|
|
199
|
+
|
|
200
|
+
function formatTimeAgo(isoDate: string): string {
|
|
201
|
+
try {
|
|
202
|
+
const diff = Date.now() - new Date(isoDate).getTime();
|
|
203
|
+
const days = Math.floor(diff / 86400000);
|
|
204
|
+
if (days === 0) return "today";
|
|
205
|
+
if (days === 1) return "yesterday";
|
|
206
|
+
return `${days} days ago`;
|
|
207
|
+
} catch {
|
|
208
|
+
return "unknown";
|
|
209
|
+
}
|
|
210
|
+
}
|
package/src/lib/triage.ts
CHANGED
|
@@ -35,6 +35,8 @@ export interface TriageConfig {
|
|
|
35
35
|
crossServiceKeywords?: string[];
|
|
36
36
|
strictness?: string; // 'relaxed' | 'standard' | 'strict'
|
|
37
37
|
relatedAliases?: string[];
|
|
38
|
+
/** Number of matched correction patterns — boosts level to at least ambiguous. */
|
|
39
|
+
patternMatchCount?: number;
|
|
38
40
|
}
|
|
39
41
|
|
|
40
42
|
// ── Constants ──────────────────────────────────────────────────────────────
|
package/src/profiles.ts
CHANGED
|
@@ -32,6 +32,7 @@ const STANDARD_TOOLS = new Set([
|
|
|
32
32
|
"checkpoint",
|
|
33
33
|
"check_session_health",
|
|
34
34
|
"log_correction",
|
|
35
|
+
"check_patterns",
|
|
35
36
|
"audit_workspace",
|
|
36
37
|
"session_handoff",
|
|
37
38
|
"what_changed",
|
|
@@ -40,11 +41,14 @@ const STANDARD_TOOLS = new Set([
|
|
|
40
41
|
"session_stats",
|
|
41
42
|
"prompt_score",
|
|
42
43
|
"generate_scorecard",
|
|
44
|
+
// Contract registry
|
|
45
|
+
"search_contracts",
|
|
43
46
|
// Timeline tools — local embeddings, zero config
|
|
44
47
|
"onboard_project",
|
|
45
48
|
"search_history",
|
|
46
49
|
"timeline_view",
|
|
47
50
|
"scan_sessions",
|
|
51
|
+
"estimate_cost",
|
|
48
52
|
]);
|
|
49
53
|
|
|
50
54
|
// Full = standard + OpenAI embedding option (needs API key)
|
|
@@ -0,0 +1,43 @@
|
|
|
1
|
+
import { z } from "zod";
|
|
2
|
+
import type { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js";
|
|
3
|
+
import { loadPatterns, matchPatterns, formatPatternMatches } from "../lib/patterns.js";
|
|
4
|
+
|
|
5
|
+
export function registerCheckPatterns(server: McpServer): void {
|
|
6
|
+
server.tool(
|
|
7
|
+
"check_patterns",
|
|
8
|
+
"Check if the current prompt matches any learned correction patterns from past mistakes. Use this to avoid repeating known pitfalls.",
|
|
9
|
+
{
|
|
10
|
+
prompt: z.string().describe("The prompt to check against known patterns"),
|
|
11
|
+
},
|
|
12
|
+
async ({ prompt }) => {
|
|
13
|
+
const patterns = loadPatterns();
|
|
14
|
+
|
|
15
|
+
if (patterns.length === 0) {
|
|
16
|
+
return {
|
|
17
|
+
content: [{
|
|
18
|
+
type: "text" as const,
|
|
19
|
+
text: "✅ No correction patterns learned yet. Patterns are extracted automatically as corrections are logged.",
|
|
20
|
+
}],
|
|
21
|
+
};
|
|
22
|
+
}
|
|
23
|
+
|
|
24
|
+
const matches = matchPatterns(prompt, patterns);
|
|
25
|
+
|
|
26
|
+
if (matches.length === 0) {
|
|
27
|
+
return {
|
|
28
|
+
content: [{
|
|
29
|
+
type: "text" as const,
|
|
30
|
+
text: `✅ No known pitfalls matched. (${patterns.length} patterns tracked)`,
|
|
31
|
+
}],
|
|
32
|
+
};
|
|
33
|
+
}
|
|
34
|
+
|
|
35
|
+
return {
|
|
36
|
+
content: [{
|
|
37
|
+
type: "text" as const,
|
|
38
|
+
text: formatPatternMatches(matches),
|
|
39
|
+
}],
|
|
40
|
+
};
|
|
41
|
+
},
|
|
42
|
+
);
|
|
43
|
+
}
|
|
@@ -5,7 +5,8 @@ import { findWorkspaceDocs, PROJECT_DIR } from "../lib/files.js";
|
|
|
5
5
|
import { searchSemantic } from "../lib/timeline-db.js";
|
|
6
6
|
import { getRelatedProjects } from "../lib/config.js";
|
|
7
7
|
import { existsSync, readFileSync } from "fs";
|
|
8
|
-
import { join, basename } from "path";
|
|
8
|
+
import { join, basename, resolve } from "path";
|
|
9
|
+
import { loadAllContracts, searchContracts, formatContracts } from "../lib/contracts.js";
|
|
9
10
|
|
|
10
11
|
/** Parse test failures from common report formats without fragile shell pipelines */
|
|
11
12
|
function getTestFailures(): string {
|
|
@@ -176,6 +177,14 @@ export function registerClarifyIntent(server: McpServer): void {
|
|
|
176
177
|
hasDirtyFiles: dirtyCount > 0,
|
|
177
178
|
});
|
|
178
179
|
|
|
180
|
+
// Check contracts FIRST (fast, no vector search)
|
|
181
|
+
const contractDirs = [resolve(PROJECT_DIR), ...getRelatedProjects()];
|
|
182
|
+
const allContracts = loadAllContracts(contractDirs);
|
|
183
|
+
const matchedContracts = searchContracts(user_message, allContracts);
|
|
184
|
+
if (matchedContracts.length > 0) {
|
|
185
|
+
sections.push(`## Matching Contracts\n${formatContracts(matchedContracts, 8)}`);
|
|
186
|
+
}
|
|
187
|
+
|
|
179
188
|
// Search for cross-project context
|
|
180
189
|
const crossProjectContext = await searchCrossProjectContext(user_message);
|
|
181
190
|
if (crossProjectContext.length > 0) {
|