@massu/core 0.1.0 → 0.1.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +71 -0
- package/README.md +2 -2
- package/dist/hooks/cost-tracker.js +149 -11527
- package/dist/hooks/post-edit-context.js +127 -11493
- package/dist/hooks/post-tool-use.js +169 -11550
- package/dist/hooks/pre-compact.js +149 -11530
- package/dist/hooks/pre-delete-check.js +144 -11523
- package/dist/hooks/quality-event.js +149 -11527
- package/dist/hooks/session-end.js +188 -11570
- package/dist/hooks/session-start.js +159 -11534
- package/dist/hooks/user-prompt.js +149 -11530
- package/package.json +14 -19
- package/src/adr-generator.ts +292 -0
- package/src/analytics.ts +373 -0
- package/src/audit-trail.ts +450 -0
- package/src/backfill-sessions.ts +180 -0
- package/src/cli.ts +105 -0
- package/src/cloud-sync.ts +190 -0
- package/src/commands/doctor.ts +300 -0
- package/src/commands/init.ts +395 -0
- package/src/commands/install-hooks.ts +26 -0
- package/src/config.ts +357 -0
- package/src/cost-tracker.ts +355 -0
- package/src/db.ts +233 -0
- package/src/dependency-scorer.ts +337 -0
- package/src/docs-map.json +100 -0
- package/src/docs-tools.ts +517 -0
- package/src/domains.ts +181 -0
- package/src/hooks/cost-tracker.ts +66 -0
- package/src/hooks/intent-suggester.ts +131 -0
- package/src/hooks/post-edit-context.ts +91 -0
- package/src/hooks/post-tool-use.ts +175 -0
- package/src/hooks/pre-compact.ts +146 -0
- package/src/hooks/pre-delete-check.ts +153 -0
- package/src/hooks/quality-event.ts +127 -0
- package/src/hooks/security-gate.ts +121 -0
- package/src/hooks/session-end.ts +467 -0
- package/src/hooks/session-start.ts +210 -0
- package/src/hooks/user-prompt.ts +91 -0
- package/src/import-resolver.ts +224 -0
- package/src/memory-db.ts +1376 -0
- package/src/memory-tools.ts +391 -0
- package/src/middleware-tree.ts +70 -0
- package/src/observability-tools.ts +343 -0
- package/src/observation-extractor.ts +411 -0
- package/src/page-deps.ts +283 -0
- package/src/prompt-analyzer.ts +332 -0
- package/src/regression-detector.ts +319 -0
- package/src/rules.ts +57 -0
- package/src/schema-mapper.ts +232 -0
- package/src/security-scorer.ts +405 -0
- package/src/security-utils.ts +133 -0
- package/src/sentinel-db.ts +578 -0
- package/src/sentinel-scanner.ts +405 -0
- package/src/sentinel-tools.ts +512 -0
- package/src/sentinel-types.ts +140 -0
- package/src/server.ts +189 -0
- package/src/session-archiver.ts +112 -0
- package/src/session-state-generator.ts +174 -0
- package/src/team-knowledge.ts +407 -0
- package/src/tools.ts +847 -0
- package/src/transcript-parser.ts +458 -0
- package/src/trpc-index.ts +214 -0
- package/src/validate-features-runner.ts +106 -0
- package/src/validation-engine.ts +358 -0
- package/dist/cli.js +0 -7890
- package/dist/server.js +0 -7008
|
@@ -0,0 +1,411 @@
|
|
|
1
|
+
// Copyright (c) 2026 Massu. All rights reserved.
|
|
2
|
+
// Licensed under BSL 1.1 - see LICENSE file for details.
|
|
3
|
+
|
|
4
|
+
import type { ParsedToolCall, TranscriptEntry, ExtractedDecision, ExtractedFailedAttempt } from './transcript-parser.ts';
|
|
5
|
+
import {
|
|
6
|
+
extractToolCalls,
|
|
7
|
+
extractDecisions,
|
|
8
|
+
extractFailedAttempts,
|
|
9
|
+
extractFileOperations,
|
|
10
|
+
extractVerificationCommands,
|
|
11
|
+
extractUserMessages,
|
|
12
|
+
estimateTokens,
|
|
13
|
+
} from './transcript-parser.ts';
|
|
14
|
+
import type { AddObservationOpts } from './memory-db.ts';
|
|
15
|
+
import { assignImportance } from './memory-db.ts';
|
|
16
|
+
import { detectDecisionPatterns } from './adr-generator.ts';
|
|
17
|
+
import { getProjectRoot } from './config.ts';
|
|
18
|
+
|
|
19
|
+
// ============================================================
|
|
20
|
+
// P2-002: Observation Extractor
|
|
21
|
+
// ============================================================
|
|
22
|
+
|
|
23
|
+
/**
|
|
24
|
+
* Visibility classification for observations.
|
|
25
|
+
* - 'public': Safe to share across teams and sync to cloud (no secrets, no absolute paths)
|
|
26
|
+
* - 'private': Contains potentially sensitive data (file paths, env vars, credentials)
|
|
27
|
+
*/
|
|
28
|
+
export type ObservationVisibility = 'public' | 'private';
|
|
29
|
+
|
|
30
|
+
/**
|
|
31
|
+
* A structured observation ready for DB insertion.
|
|
32
|
+
*/
|
|
33
|
+
export interface ExtractedObservation {
|
|
34
|
+
type: string;
|
|
35
|
+
title: string;
|
|
36
|
+
detail: string | null;
|
|
37
|
+
visibility: ObservationVisibility;
|
|
38
|
+
opts: AddObservationOpts;
|
|
39
|
+
}
|
|
40
|
+
|
|
41
|
+
/**
|
|
42
|
+
* Patterns that indicate private/sensitive content.
|
|
43
|
+
* If any of these match the observation title or detail, it is classified as 'private'.
|
|
44
|
+
*/
|
|
45
|
+
const PRIVATE_PATTERNS = [
|
|
46
|
+
/\/Users\/\w+/, // Absolute macOS paths
|
|
47
|
+
/\/home\/\w+/, // Absolute Linux paths
|
|
48
|
+
/[A-Z]:\\/, // Windows paths
|
|
49
|
+
/\b(api[_-]?key|secret|token|password|credential|dsn)\b/i, // Secrets
|
|
50
|
+
/\b(STRIPE_|SUPABASE_|SENTRY_|AWS_|DATABASE_URL)\b/, // Env var names
|
|
51
|
+
/\.(env|pem|key|cert)\b/, // Sensitive file extensions
|
|
52
|
+
/Bearer\s+\S+/, // Auth tokens
|
|
53
|
+
/sk_live_|sk_test_|whsec_/, // Stripe keys
|
|
54
|
+
];
|
|
55
|
+
|
|
56
|
+
/**
|
|
57
|
+
* Classify whether an observation is safe for public sharing or should remain private.
|
|
58
|
+
*/
|
|
59
|
+
export function classifyVisibility(title: string, detail: string | null): ObservationVisibility {
|
|
60
|
+
const text = `${title} ${detail ?? ''}`;
|
|
61
|
+
for (const pattern of PRIVATE_PATTERNS) {
|
|
62
|
+
if (pattern.test(text)) return 'private';
|
|
63
|
+
}
|
|
64
|
+
return 'public';
|
|
65
|
+
}
|
|
66
|
+
|
|
67
|
+
/**
|
|
68
|
+
* Noise filtering rules - these tool calls add no memory value.
|
|
69
|
+
*/
|
|
70
|
+
export function isNoisyToolCall(tc: ParsedToolCall, seenReads: Set<string>): boolean {
|
|
71
|
+
// Glob/Grep tool calls (search operations, not actions)
|
|
72
|
+
if (tc.toolName === 'Glob' || tc.toolName === 'Grep') return true;
|
|
73
|
+
|
|
74
|
+
// Duplicate Read calls to the same file within same session
|
|
75
|
+
if (tc.toolName === 'Read') {
|
|
76
|
+
const filePath = tc.input.file_path as string ?? '';
|
|
77
|
+
if (seenReads.has(filePath)) return true;
|
|
78
|
+
seenReads.add(filePath);
|
|
79
|
+
|
|
80
|
+
// Read of node_modules files
|
|
81
|
+
if (filePath.includes('node_modules')) return true;
|
|
82
|
+
}
|
|
83
|
+
|
|
84
|
+
// Bash with trivial commands
|
|
85
|
+
if (tc.toolName === 'Bash') {
|
|
86
|
+
const cmd = (tc.input.command as string ?? '').trim();
|
|
87
|
+
const trivialPatterns = /^(ls|pwd|echo|cat\s|head\s|tail\s|wc\s)/;
|
|
88
|
+
if (trivialPatterns.test(cmd)) return true;
|
|
89
|
+
}
|
|
90
|
+
|
|
91
|
+
// Empty or error-only tool responses
|
|
92
|
+
if (!tc.result || tc.result.trim() === '') return true;
|
|
93
|
+
|
|
94
|
+
return false;
|
|
95
|
+
}
|
|
96
|
+
|
|
97
|
+
/**
 * Extract all observations from transcript entries.
 * This is the main entry point for both real-time (P3-002) and backfill (P6-001).
 *
 * Runs four passes over the transcript:
 *   1. tool calls      -> classifyToolCall() (after noise filtering)
 *   2. assistant text  -> 'decision' observations
 *   3. assistant text  -> 'failed_attempt' observations
 *   4. tool calls      -> 'vr_check' observations for verification commands
 *
 * @param entries Parsed transcript entries for one session.
 * @returns Observations in extraction order, ready for DB insertion.
 */
export function extractObservationsFromEntries(entries: TranscriptEntry[]): ExtractedObservation[] {
  const observations: ExtractedObservation[] = [];
  // Shared across all tool calls so duplicate Reads are deduped session-wide.
  const seenReads = new Set<string>();

  // 1. Extract from tool calls
  const toolCalls = extractToolCalls(entries);
  for (const tc of toolCalls) {
    if (isNoisyToolCall(tc, seenReads)) continue;
    const obs = classifyToolCall(tc);
    if (obs) observations.push(obs);
  }

  // 2. Extract decisions from assistant text
  const decisions = extractDecisions(entries);
  for (const decision of decisions) {
    // Titles are capped at 200 chars; full text still counts toward token estimate.
    const title = decision.text.slice(0, 200);
    const detail = decision.context;
    observations.push({
      type: 'decision',
      title,
      detail,
      visibility: classifyVisibility(title, detail),
      opts: {
        importance: assignImportance('decision'),
        originalTokens: estimateTokens(decision.text + decision.context),
      },
    });
  }

  // 3. Extract failed attempts from assistant text
  const failures = extractFailedAttempts(entries);
  for (const failure of failures) {
    const title = failure.text.slice(0, 200);
    const detail = failure.context;
    observations.push({
      type: 'failed_attempt',
      title,
      detail,
      visibility: classifyVisibility(title, detail),
      opts: {
        importance: assignImportance('failed_attempt'),
        originalTokens: estimateTokens(failure.text + failure.context),
      },
    });
  }

  // 4. Extract verification commands (reuses the tool calls parsed in pass 1)
  const verifications = extractVerificationCommands(toolCalls);
  for (const vr of verifications) {
    const title = `${vr.vrType}: ${vr.passed ? 'PASS' : 'FAIL'}`;
    const detail = vr.command;
    observations.push({
      type: 'vr_check',
      title,
      detail,
      visibility: classifyVisibility(title, detail),
      opts: {
        vrType: vr.vrType,
        evidence: vr.result,
        // PASS/FAIL influences importance scoring for verification checks.
        importance: assignImportance('vr_check', vr.passed ? 'PASS' : 'FAIL'),
        originalTokens: estimateTokens(vr.result),
      },
    });
  }

  return observations;
}
|
|
168
|
+
|
|
169
|
+
/**
 * Classify a single tool call into an observation (or null if not observation-worthy).
 *
 * Mapping:
 *   Write/Edit -> 'file_change'
 *   Read       -> 'discovery' (only for plan files, CLAUDE.md, CURRENT.md)
 *   Bash       -> 'bugfix'/'feature' (git commit), 'pattern_compliance'
 *                 (pattern-scanner), or 'vr_check' (tests / build / typecheck)
 *   anything else -> null
 */
function classifyToolCall(tc: ParsedToolCall): ExtractedObservation | null {
  const result = tc.result ?? '';

  switch (tc.toolName) {
    case 'Write': {
      const filePath = tc.input.file_path as string ?? 'unknown';
      const title = `Created/wrote: ${shortenPath(filePath)}`;
      return {
        type: 'file_change',
        title,
        detail: null,
        visibility: classifyVisibility(title, filePath),
        opts: {
          filesInvolved: [filePath],
          importance: assignImportance('file_change'),
          originalTokens: estimateTokens(result),
          // Pull CR-/VR-/P*-* references out of the result and path, if any.
          ...extractLinkedReferences(result + filePath),
        },
      };
    }

    case 'Edit': {
      const filePath = tc.input.file_path as string ?? 'unknown';
      const title = `Edited: ${shortenPath(filePath)}`;
      return {
        type: 'file_change',
        title,
        detail: null,
        visibility: classifyVisibility(title, filePath),
        opts: {
          filesInvolved: [filePath],
          importance: assignImportance('file_change'),
          originalTokens: estimateTokens(result),
          ...extractLinkedReferences(result + filePath),
        },
      };
    }

    case 'Read': {
      const filePath = tc.input.file_path as string ?? 'unknown';
      // Only keep reads of interesting files (plan files, CLAUDE.md, etc.)
      if (filePath.includes('/plans/') || filePath.includes('CLAUDE.md') || filePath.includes('CURRENT.md')) {
        const title = `Read: ${shortenPath(filePath)}`;
        return {
          type: 'discovery',
          title,
          detail: null,
          visibility: classifyVisibility(title, filePath),
          opts: {
            filesInvolved: [filePath],
            importance: assignImportance('discovery'),
            originalTokens: estimateTokens(result),
          },
        };
      }
      return null;
    }

    case 'Bash': {
      const cmd = (tc.input.command as string ?? '').trim();

      // Git commit: classified as bugfix when the message mentions "fix".
      if (cmd.includes('git commit')) {
        const commitMsg = extractCommitMessage(cmd);
        const isfix = commitMsg.toLowerCase().includes('fix');
        const title = `Commit: ${commitMsg.slice(0, 150)}`;
        return {
          type: isfix ? 'bugfix' : 'feature',
          title,
          detail: cmd,
          visibility: classifyVisibility(title, cmd),
          opts: {
            importance: assignImportance(isfix ? 'bugfix' : 'feature'),
            originalTokens: estimateTokens(result),
          },
        };
      }

      // Pattern scanner: PASS unless output mentions FAIL or BLOCKED.
      if (cmd.includes('pattern-scanner')) {
        const passed = !result.includes('FAIL') && !result.includes('BLOCKED');
        const title = `Pattern Scanner: ${passed ? 'PASS' : 'FAIL'}`;
        const detail = result.slice(0, 500);
        return {
          type: 'pattern_compliance',
          title,
          detail,
          visibility: classifyVisibility(title, detail),
          opts: {
            evidence: result.slice(0, 500),
            importance: assignImportance('pattern_compliance', passed ? 'PASS' : 'FAIL'),
            originalTokens: estimateTokens(result),
          },
        };
      }

      // npm test / vitest: heuristic — any "FAIL" in output means failure.
      if (cmd.includes('npm test') || cmd.includes('vitest')) {
        const passed = !tc.isError && !result.includes('FAIL');
        const title = `Tests: ${passed ? 'PASS' : 'FAIL'}`;
        return {
          type: 'vr_check',
          title,
          detail: cmd,
          visibility: classifyVisibility(title, cmd),
          opts: {
            vrType: 'VR-TEST',
            evidence: result.slice(0, 500),
            importance: assignImportance('vr_check', passed ? 'PASS' : 'FAIL'),
            originalTokens: estimateTokens(result),
          },
        };
      }

      // npm run build / tsc --noEmit.
      // NOTE(review): the lowercase 'error' substring check may misfire on
      // benign output containing the word "error" — confirm against real logs.
      if (cmd.includes('npm run build') || cmd.includes('tsc --noEmit')) {
        const vrType = cmd.includes('tsc') ? 'VR-TYPE' : 'VR-BUILD';
        const passed = !tc.isError && !result.includes('error');
        const title = `${vrType}: ${passed ? 'PASS' : 'FAIL'}`;
        return {
          type: 'vr_check',
          title,
          detail: cmd,
          visibility: classifyVisibility(title, cmd),
          opts: {
            vrType,
            evidence: result.slice(0, 500),
            importance: assignImportance('vr_check', passed ? 'PASS' : 'FAIL'),
            originalTokens: estimateTokens(result),
          },
        };
      }

      return null;
    }

    default:
      return null;
  }
}
|
|
312
|
+
|
|
313
|
+
/**
|
|
314
|
+
* Extract CR rule, VR type, and plan item references from text.
|
|
315
|
+
*/
|
|
316
|
+
function extractLinkedReferences(text: string): Partial<AddObservationOpts> {
|
|
317
|
+
const result: Partial<AddObservationOpts> = {};
|
|
318
|
+
|
|
319
|
+
const crMatch = text.match(/CR-(\d+)/);
|
|
320
|
+
if (crMatch) result.crRule = `CR-${crMatch[1]}`;
|
|
321
|
+
|
|
322
|
+
const vrMatch = text.match(/VR-([A-Z_]+)/);
|
|
323
|
+
if (vrMatch) result.vrType = `VR-${vrMatch[1]}`;
|
|
324
|
+
|
|
325
|
+
const planMatch = text.match(/P(\d+)-(\d+)/);
|
|
326
|
+
if (planMatch) result.planItem = `P${planMatch[1]}-${planMatch[2]}`;
|
|
327
|
+
|
|
328
|
+
return result;
|
|
329
|
+
}
|
|
330
|
+
|
|
331
|
+
/**
|
|
332
|
+
* Extract commit message from a git commit command.
|
|
333
|
+
*/
|
|
334
|
+
function extractCommitMessage(cmd: string): string {
|
|
335
|
+
// Match -m "message" or -m 'message'
|
|
336
|
+
const match = cmd.match(/-m\s+["'](.+?)["']/);
|
|
337
|
+
if (match) return match[1];
|
|
338
|
+
|
|
339
|
+
// Match heredoc pattern
|
|
340
|
+
const heredocMatch = cmd.match(/<<['"]?EOF['"]?\s*\n?([\s\S]*?)EOF/);
|
|
341
|
+
if (heredocMatch) return heredocMatch[1].trim().split('\n')[0];
|
|
342
|
+
|
|
343
|
+
return 'Unknown commit';
|
|
344
|
+
}
|
|
345
|
+
|
|
346
|
+
/**
|
|
347
|
+
* Shorten a file path for display.
|
|
348
|
+
*/
|
|
349
|
+
function shortenPath(filePath: string): string {
|
|
350
|
+
// Remove project root prefix, then common home dir prefix
|
|
351
|
+
const root = getProjectRoot();
|
|
352
|
+
if (filePath.startsWith(root + '/')) {
|
|
353
|
+
return filePath.slice(root.length + 1);
|
|
354
|
+
}
|
|
355
|
+
return filePath.replace(/^\/Users\/\w+\//, '~/');
|
|
356
|
+
}
|
|
357
|
+
|
|
358
|
+
/**
|
|
359
|
+
* Classify a single tool call for real-time capture (used by PostToolUse hook P3-002).
|
|
360
|
+
* Applies noise filtering.
|
|
361
|
+
*/
|
|
362
|
+
export function classifyRealTimeToolCall(
|
|
363
|
+
toolName: string,
|
|
364
|
+
toolInput: Record<string, unknown>,
|
|
365
|
+
toolResponse: string,
|
|
366
|
+
seenReads: Set<string>
|
|
367
|
+
): ExtractedObservation | null {
|
|
368
|
+
const tc: ParsedToolCall = {
|
|
369
|
+
toolName,
|
|
370
|
+
toolUseId: '',
|
|
371
|
+
input: toolInput,
|
|
372
|
+
result: toolResponse,
|
|
373
|
+
isError: false,
|
|
374
|
+
};
|
|
375
|
+
|
|
376
|
+
if (isNoisyToolCall(tc, seenReads)) return null;
|
|
377
|
+
|
|
378
|
+
// P2-003: Detect architecture decision patterns in tool responses
|
|
379
|
+
if (toolResponse && detectDecisionPatterns(toolResponse)) {
|
|
380
|
+
const firstLine = toolResponse.split('\n')[0].slice(0, 200);
|
|
381
|
+
const title = `Architecture decision: ${firstLine}`;
|
|
382
|
+
const detail = toolResponse.slice(0, 1000);
|
|
383
|
+
return {
|
|
384
|
+
type: 'decision',
|
|
385
|
+
title,
|
|
386
|
+
detail,
|
|
387
|
+
visibility: classifyVisibility(title, detail),
|
|
388
|
+
opts: {
|
|
389
|
+
importance: assignImportance('decision'),
|
|
390
|
+
originalTokens: estimateTokens(toolResponse),
|
|
391
|
+
...extractLinkedReferences(toolResponse),
|
|
392
|
+
},
|
|
393
|
+
};
|
|
394
|
+
}
|
|
395
|
+
|
|
396
|
+
return classifyToolCall(tc);
|
|
397
|
+
}
|
|
398
|
+
|
|
399
|
+
/**
|
|
400
|
+
* Detect plan progress references in tool responses.
|
|
401
|
+
* Returns plan items that appear to be completed.
|
|
402
|
+
*/
|
|
403
|
+
export function detectPlanProgress(toolResponse: string): Array<{ planItem: string; status: string }> {
|
|
404
|
+
const results: Array<{ planItem: string; status: string }> = [];
|
|
405
|
+
const progressPattern = /(P\d+-\d+)\s*[:\-]?\s*(COMPLETE|PASS|DONE|complete|pass|done)/g;
|
|
406
|
+
let match;
|
|
407
|
+
while ((match = progressPattern.exec(toolResponse)) !== null) {
|
|
408
|
+
results.push({ planItem: match[1], status: 'complete' });
|
|
409
|
+
}
|
|
410
|
+
return results;
|
|
411
|
+
}
|
package/src/page-deps.ts
ADDED
|
@@ -0,0 +1,283 @@
|
|
|
1
|
+
// Copyright (c) 2026 Massu. All rights reserved.
|
|
2
|
+
// Licensed under BSL 1.1 - see LICENSE file for details.
|
|
3
|
+
|
|
4
|
+
import { readFileSync, existsSync } from 'fs';
|
|
5
|
+
import { resolve } from 'path';
|
|
6
|
+
import type Database from 'better-sqlite3';
|
|
7
|
+
import { getConfig, getProjectRoot } from './config.ts';
|
|
8
|
+
|
|
9
|
+
/**
 * A fully resolved page dependency chain: page -> components/hooks -> routers -> tables.
 */
export interface PageChain {
  page: string;         // page file path, e.g. src/app/orders/page.tsx
  route: string;        // derived URL route, e.g. /orders
  portal: string;       // access scope / portal the route belongs to
  components: string[]; // component files reachable through the page's imports
  hooks: string[];      // hook files (paths under /hooks/ or matching use[A-Z]) reachable from the page
  routers: string[];    // router names invoked via api.<router>.<procedure>
  tables: string[];     // database tables those routers touch
}
|
|
18
|
+
|
|
19
|
+
/**
|
|
20
|
+
* Derive the URL route from a Next.js page file path.
|
|
21
|
+
* e.g., src/app/orders/page.tsx -> /orders
|
|
22
|
+
* e.g., src/app/orders/[id]/page.tsx -> /orders/[id]
|
|
23
|
+
*/
|
|
24
|
+
export function deriveRoute(pageFile: string): string {
|
|
25
|
+
let route = pageFile
|
|
26
|
+
.replace(/^src\/app/, '')
|
|
27
|
+
.replace(/\/page\.tsx?$/, '')
|
|
28
|
+
.replace(/\/page\.jsx?$/, '');
|
|
29
|
+
return route || '/';
|
|
30
|
+
}
|
|
31
|
+
|
|
32
|
+
/**
|
|
33
|
+
* Determine which portal/scope a page belongs to based on its route.
|
|
34
|
+
* Uses accessScopes from config if available, otherwise infers from route prefix.
|
|
35
|
+
*/
|
|
36
|
+
export function derivePortal(route: string): string {
|
|
37
|
+
const scopes = getConfig().accessScopes;
|
|
38
|
+
if (scopes && scopes.length > 0) {
|
|
39
|
+
for (const scope of scopes) {
|
|
40
|
+
if (route.startsWith('/' + scope)) return scope;
|
|
41
|
+
}
|
|
42
|
+
}
|
|
43
|
+
// Fallback: use the first path segment as the scope
|
|
44
|
+
const parts = route.split('/').filter(Boolean);
|
|
45
|
+
return parts[0] ?? 'default';
|
|
46
|
+
}
|
|
47
|
+
|
|
48
|
+
/**
|
|
49
|
+
* Recursively trace imports from a file, collecting components and hooks.
|
|
50
|
+
*/
|
|
51
|
+
function traceImports(
|
|
52
|
+
startFile: string,
|
|
53
|
+
dataDb: Database.Database,
|
|
54
|
+
visited: Set<string>,
|
|
55
|
+
components: Set<string>,
|
|
56
|
+
hooks: Set<string>,
|
|
57
|
+
maxDepth: number = 5
|
|
58
|
+
): void {
|
|
59
|
+
if (maxDepth <= 0 || visited.has(startFile)) return;
|
|
60
|
+
visited.add(startFile);
|
|
61
|
+
|
|
62
|
+
const imports = dataDb.prepare(
|
|
63
|
+
'SELECT target_file, imported_names FROM massu_imports WHERE source_file = ?'
|
|
64
|
+
).all(startFile) as { target_file: string; imported_names: string }[];
|
|
65
|
+
|
|
66
|
+
for (const imp of imports) {
|
|
67
|
+
const target = imp.target_file;
|
|
68
|
+
|
|
69
|
+
// Classify the import
|
|
70
|
+
if (target.includes('/components/')) {
|
|
71
|
+
components.add(target);
|
|
72
|
+
}
|
|
73
|
+
if (target.includes('/hooks/') || target.match(/use[A-Z]/)) {
|
|
74
|
+
hooks.add(target);
|
|
75
|
+
}
|
|
76
|
+
|
|
77
|
+
// Recurse into local imports (not node_modules)
|
|
78
|
+
if (target.startsWith('src/')) {
|
|
79
|
+
traceImports(target, dataDb, visited, components, hooks, maxDepth - 1);
|
|
80
|
+
}
|
|
81
|
+
}
|
|
82
|
+
}
|
|
83
|
+
|
|
84
|
+
/**
|
|
85
|
+
* Find routers called by hooks/components via api.* patterns.
|
|
86
|
+
*/
|
|
87
|
+
function findRouterCalls(files: string[]): string[] {
|
|
88
|
+
const routers = new Set<string>();
|
|
89
|
+
const projectRoot = getProjectRoot();
|
|
90
|
+
|
|
91
|
+
for (const file of files) {
|
|
92
|
+
const absPath = resolve(projectRoot, file);
|
|
93
|
+
if (!existsSync(absPath)) continue;
|
|
94
|
+
|
|
95
|
+
try {
|
|
96
|
+
const source = readFileSync(absPath, 'utf-8');
|
|
97
|
+
const apiCallRegex = /api\.(\w+)\.\w+/g;
|
|
98
|
+
let match;
|
|
99
|
+
while ((match = apiCallRegex.exec(source)) !== null) {
|
|
100
|
+
routers.add(match[1]);
|
|
101
|
+
}
|
|
102
|
+
} catch {
|
|
103
|
+
// Skip unreadable component files
|
|
104
|
+
}
|
|
105
|
+
}
|
|
106
|
+
|
|
107
|
+
return [...routers];
|
|
108
|
+
}
|
|
109
|
+
|
|
110
|
+
/**
|
|
111
|
+
* Find database tables touched by routers.
|
|
112
|
+
*/
|
|
113
|
+
function findTablesFromRouters(routerNames: string[], dataDb: Database.Database): string[] {
|
|
114
|
+
const tables = new Set<string>();
|
|
115
|
+
|
|
116
|
+
// Look up router files from the tRPC index
|
|
117
|
+
for (const routerName of routerNames) {
|
|
118
|
+
const procs = dataDb.prepare(
|
|
119
|
+
'SELECT DISTINCT router_file FROM massu_trpc_procedures WHERE router_name = ?'
|
|
120
|
+
).all(routerName) as { router_file: string }[];
|
|
121
|
+
|
|
122
|
+
for (const proc of procs) {
|
|
123
|
+
const absPath = resolve(getProjectRoot(), proc.router_file);
|
|
124
|
+
if (!existsSync(absPath)) continue;
|
|
125
|
+
|
|
126
|
+
try {
|
|
127
|
+
const source = readFileSync(absPath, 'utf-8');
|
|
128
|
+
// Match database access patterns from config (default: ctx.db.{table})
|
|
129
|
+
const dbPattern = getConfig().dbAccessPattern ?? 'ctx.db.{table}';
|
|
130
|
+
const regexStr = dbPattern
|
|
131
|
+
.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')
|
|
132
|
+
.replace('\\{table\\}', '(\\w+)');
|
|
133
|
+
const tableRegex = new RegExp(regexStr + '\\.', 'g');
|
|
134
|
+
let match;
|
|
135
|
+
while ((match = tableRegex.exec(source)) !== null) {
|
|
136
|
+
tables.add(match[1]);
|
|
137
|
+
}
|
|
138
|
+
} catch {
|
|
139
|
+
// Skip unreadable source files
|
|
140
|
+
}
|
|
141
|
+
}
|
|
142
|
+
}
|
|
143
|
+
|
|
144
|
+
return [...tables];
|
|
145
|
+
}
|
|
146
|
+
|
|
147
|
+
/**
 * Build page dependency chains for all page.tsx files.
 *
 * Rebuilds the massu_page_deps table from scratch: for every Next.js page in
 * the CodeGraph `files` table, traces its import tree, finds router calls in
 * the reachable files, and resolves the tables those routers touch. All
 * inserts run inside a single SQLite transaction.
 *
 * @param dataDb      Massu data DB (massu_page_deps, massu_imports, ...).
 * @param codegraphDb CodeGraph DB providing the `files` table.
 * @returns Number of pages processed/inserted.
 */
export function buildPageDeps(dataDb: Database.Database, codegraphDb: Database.Database): number {
  // Clear existing data — this is a full rebuild, not an incremental update.
  dataDb.exec('DELETE FROM massu_page_deps');

  // Find all page.tsx files from CodeGraph (nested pages plus the app root).
  const pages = codegraphDb.prepare(
    "SELECT path FROM files WHERE path LIKE 'src/app/%/page.tsx' OR path = 'src/app/page.tsx'"
  ).all() as { path: string }[];

  const insertStmt = dataDb.prepare(
    'INSERT INTO massu_page_deps (page_file, route, portal, components, hooks, routers, tables_touched) VALUES (?, ?, ?, ?, ?, ?, ?)'
  );

  let count = 0;

  const insertAll = dataDb.transaction(() => {
    for (const page of pages) {
      const route = deriveRoute(page.path);
      const portal = derivePortal(route);

      // Fresh accumulator sets per page; traceImports fills them in place.
      const visited = new Set<string>();
      const components = new Set<string>();
      const hooks = new Set<string>();

      traceImports(page.path, dataDb, visited, components, hooks);

      // Router calls are searched across every file reachable from the page.
      const allFiles = [...visited];
      const routers = findRouterCalls(allFiles);
      const tables = findTablesFromRouters(routers, dataDb);

      // List columns are stored as JSON-encoded string arrays.
      insertStmt.run(
        page.path,
        route,
        portal,
        JSON.stringify([...components]),
        JSON.stringify([...hooks]),
        JSON.stringify(routers),
        JSON.stringify(tables)
      );

      count++;
    }
  });

  insertAll();
  return count;
}
|
|
197
|
+
|
|
198
|
+
/**
|
|
199
|
+
* Get the dependency chain for a specific page.
|
|
200
|
+
*/
|
|
201
|
+
export function getPageChain(dataDb: Database.Database, pageFile: string): PageChain | null {
|
|
202
|
+
const row = dataDb.prepare('SELECT * FROM massu_page_deps WHERE page_file = ?').get(pageFile) as {
|
|
203
|
+
page_file: string; route: string; portal: string;
|
|
204
|
+
components: string; hooks: string; routers: string; tables_touched: string;
|
|
205
|
+
} | undefined;
|
|
206
|
+
|
|
207
|
+
if (!row) return null;
|
|
208
|
+
|
|
209
|
+
return {
|
|
210
|
+
page: row.page_file,
|
|
211
|
+
route: row.route,
|
|
212
|
+
portal: row.portal,
|
|
213
|
+
components: JSON.parse(row.components),
|
|
214
|
+
hooks: JSON.parse(row.hooks),
|
|
215
|
+
routers: JSON.parse(row.routers),
|
|
216
|
+
tables: JSON.parse(row.tables_touched),
|
|
217
|
+
};
|
|
218
|
+
}
|
|
219
|
+
|
|
220
|
+
/**
 * Find all pages affected by a given file (reverse lookup).
 *
 * If the file is itself a page, that single chain is returned. Otherwise a
 * breadth-first walk up the massu_imports graph (depth-capped at 10) collects
 * every transitive importer, and any page found among them is returned.
 *
 * @param dataDb Massu data DB (massu_imports, massu_page_deps).
 * @param file   Project-relative file path to look up.
 */
export function findAffectedPages(dataDb: Database.Database, file: string): PageChain[] {
  // Check if this file is directly a page
  const directPage = getPageChain(dataDb, file);
  if (directPage) return [directPage];

  // Find all pages that import this file (directly or transitively)
  // First, find who imports this file
  const importers = dataDb.prepare(
    'SELECT source_file FROM massu_imports WHERE target_file = ?'
  ).all(file) as { source_file: string }[];

  const affectedFiles = new Set<string>([file, ...importers.map(i => i.source_file)]);

  // Walk up the import tree to find pages (BFS over reverse-import edges).
  let frontier = [...importers.map(i => i.source_file)];
  const visited = new Set(frontier);
  const maxDepth = 10; // depth cap guards against pathological import graphs
  let depth = 0;

  while (frontier.length > 0 && depth < maxDepth) {
    const next: string[] = [];
    for (const f of frontier) {
      const upstreamImporters = dataDb.prepare(
        'SELECT source_file FROM massu_imports WHERE target_file = ?'
      ).all(f) as { source_file: string }[];

      for (const imp of upstreamImporters) {
        if (!visited.has(imp.source_file)) {
          visited.add(imp.source_file);
          affectedFiles.add(imp.source_file);
          next.push(imp.source_file);
        }
      }
    }
    frontier = next;
    depth++;
  }

  // Now find which pages are in the affected set
  const allPages = dataDb.prepare('SELECT * FROM massu_page_deps').all() as {
    page_file: string; route: string; portal: string;
    components: string; hooks: string; routers: string; tables_touched: string;
  }[];

  const results: PageChain[] = [];
  for (const row of allPages) {
    if (affectedFiles.has(row.page_file)) {
      results.push({
        page: row.page_file,
        route: row.route,
        portal: row.portal,
        components: JSON.parse(row.components),
        hooks: JSON.parse(row.hooks),
        routers: JSON.parse(row.routers),
        tables: JSON.parse(row.tables_touched),
      });
    }
  }

  return results;
}
|