@massu/core 0.1.2 → 0.4.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/commands/_shared-preamble.md +76 -0
- package/commands/massu-audit-deps.md +211 -0
- package/commands/massu-changelog.md +174 -0
- package/commands/massu-cleanup.md +315 -0
- package/commands/massu-commit.md +481 -0
- package/commands/massu-create-plan.md +752 -0
- package/commands/massu-dead-code.md +131 -0
- package/commands/massu-debug.md +484 -0
- package/commands/massu-deploy.md +91 -0
- package/commands/massu-deps.md +374 -0
- package/commands/massu-doc-gen.md +279 -0
- package/commands/massu-docs.md +364 -0
- package/commands/massu-estimate.md +313 -0
- package/commands/massu-golden-path.md +973 -0
- package/commands/massu-guide.md +167 -0
- package/commands/massu-hotfix.md +480 -0
- package/commands/massu-loop-playwright.md +837 -0
- package/commands/massu-loop.md +775 -0
- package/commands/massu-new-feature.md +511 -0
- package/commands/massu-parity.md +214 -0
- package/commands/massu-plan.md +456 -0
- package/commands/massu-push-light.md +207 -0
- package/commands/massu-push.md +434 -0
- package/commands/massu-refactor.md +410 -0
- package/commands/massu-release.md +363 -0
- package/commands/massu-review.md +238 -0
- package/commands/massu-simplify.md +281 -0
- package/commands/massu-status.md +278 -0
- package/commands/massu-tdd.md +201 -0
- package/commands/massu-test.md +516 -0
- package/commands/massu-verify-playwright.md +281 -0
- package/commands/massu-verify.md +667 -0
- package/dist/cli.js +12522 -0
- package/dist/hooks/cost-tracker.js +80 -5
- package/dist/hooks/post-edit-context.js +72 -6
- package/dist/hooks/post-tool-use.js +234 -57
- package/dist/hooks/pre-compact.js +144 -5
- package/dist/hooks/pre-delete-check.js +141 -11
- package/dist/hooks/quality-event.js +80 -5
- package/dist/hooks/security-gate.js +29 -0
- package/dist/hooks/session-end.js +83 -8
- package/dist/hooks/session-start.js +153 -7
- package/dist/hooks/user-prompt.js +166 -5
- package/package.json +6 -5
- package/src/backfill-sessions.ts +5 -4
- package/src/cli.ts +6 -0
- package/src/commands/doctor.ts +193 -6
- package/src/commands/init.ts +235 -6
- package/src/commands/install-commands.ts +137 -0
- package/src/config.ts +68 -2
- package/src/db.ts +115 -2
- package/src/docs-tools.ts +8 -6
- package/src/hooks/post-edit-context.ts +1 -1
- package/src/hooks/post-tool-use.ts +130 -0
- package/src/hooks/pre-compact.ts +23 -1
- package/src/hooks/pre-delete-check.ts +92 -4
- package/src/hooks/security-gate.ts +32 -0
- package/src/hooks/session-start.ts +97 -4
- package/src/hooks/user-prompt.ts +46 -1
- package/src/import-resolver.ts +2 -1
- package/src/knowledge-db.ts +169 -0
- package/src/knowledge-indexer.ts +704 -0
- package/src/knowledge-tools.ts +1413 -0
- package/src/license.ts +482 -0
- package/src/memory-db.ts +14 -1
- package/src/observation-extractor.ts +11 -4
- package/src/page-deps.ts +3 -2
- package/src/python/coupling-detector.ts +124 -0
- package/src/python/domain-enforcer.ts +83 -0
- package/src/python/impact-analyzer.ts +95 -0
- package/src/python/import-parser.ts +244 -0
- package/src/python/import-resolver.ts +135 -0
- package/src/python/migration-indexer.ts +115 -0
- package/src/python/migration-parser.ts +332 -0
- package/src/python/model-indexer.ts +70 -0
- package/src/python/model-parser.ts +279 -0
- package/src/python/route-indexer.ts +58 -0
- package/src/python/route-parser.ts +317 -0
- package/src/python-tools.ts +629 -0
- package/src/sentinel-db.ts +2 -1
- package/src/server.ts +29 -6
- package/src/session-archiver.ts +4 -5
- package/src/tools.ts +283 -31
- package/README.md +0 -40
|
@@ -0,0 +1,83 @@
|
|
|
1
|
+
// Copyright (c) 2026 Massu. All rights reserved.
|
|
2
|
+
// Licensed under BSL 1.1 - see LICENSE file for details.
|
|
3
|
+
|
|
4
|
+
import type Database from 'better-sqlite3';
|
|
5
|
+
import { getConfig } from '../config.ts';
|
|
6
|
+
|
|
7
|
+
/** A single import edge that crosses domains in a way the config forbids. */
export interface DomainViolation {
  /** Importing file (path relative to the project root). */
  sourceFile: string;
  /** Domain of the importing file, per python.domains config. */
  sourceDomain: string;
  /** Imported file (path relative to the project root). */
  targetFile: string;
  /** Domain of the imported file. */
  targetDomain: string;
}
|
|
13
|
+
|
|
14
|
+
/**
|
|
15
|
+
* Classify a Python file into its domain based on python.domains config.
|
|
16
|
+
*/
|
|
17
|
+
export function classifyPythonFileDomain(file: string): string {
|
|
18
|
+
const config = getConfig();
|
|
19
|
+
const domains = config.python?.domains || [];
|
|
20
|
+
const pythonRoot = config.python?.root || '';
|
|
21
|
+
|
|
22
|
+
let modulePath = file;
|
|
23
|
+
if (pythonRoot && modulePath.startsWith(pythonRoot + '/')) {
|
|
24
|
+
modulePath = modulePath.slice(pythonRoot.length + 1);
|
|
25
|
+
}
|
|
26
|
+
modulePath = modulePath.replace(/\/__init__\.py$/, '').replace(/\.py$/, '').replace(/\//g, '.');
|
|
27
|
+
|
|
28
|
+
for (const domain of domains) {
|
|
29
|
+
for (const pkg of domain.packages) {
|
|
30
|
+
if (modulePath.startsWith(pkg) || modulePath === pkg) {
|
|
31
|
+
return domain.name;
|
|
32
|
+
}
|
|
33
|
+
}
|
|
34
|
+
}
|
|
35
|
+
return 'Unknown';
|
|
36
|
+
}
|
|
37
|
+
|
|
38
|
+
/**
|
|
39
|
+
* Find all cross-domain import violations in the Python import graph.
|
|
40
|
+
*/
|
|
41
|
+
export function findPythonDomainViolations(dataDb: Database.Database): DomainViolation[] {
|
|
42
|
+
const config = getConfig();
|
|
43
|
+
const domains = config.python?.domains || [];
|
|
44
|
+
if (domains.length === 0) return [];
|
|
45
|
+
|
|
46
|
+
const imports = dataDb.prepare(
|
|
47
|
+
'SELECT source_file, target_file FROM massu_py_imports'
|
|
48
|
+
).all() as { source_file: string; target_file: string }[];
|
|
49
|
+
|
|
50
|
+
const violations: DomainViolation[] = [];
|
|
51
|
+
|
|
52
|
+
for (const imp of imports) {
|
|
53
|
+
const srcDomain = classifyPythonFileDomain(imp.source_file);
|
|
54
|
+
const tgtDomain = classifyPythonFileDomain(imp.target_file);
|
|
55
|
+
|
|
56
|
+
if (srcDomain === tgtDomain || srcDomain === 'Unknown' || tgtDomain === 'Unknown') continue;
|
|
57
|
+
|
|
58
|
+
const srcConfig = domains.find(d => d.name === srcDomain);
|
|
59
|
+
if (srcConfig && !srcConfig.allowed_imports_from.includes(tgtDomain)) {
|
|
60
|
+
violations.push({
|
|
61
|
+
sourceFile: imp.source_file,
|
|
62
|
+
sourceDomain: srcDomain,
|
|
63
|
+
targetFile: imp.target_file,
|
|
64
|
+
targetDomain: tgtDomain,
|
|
65
|
+
});
|
|
66
|
+
}
|
|
67
|
+
}
|
|
68
|
+
|
|
69
|
+
return violations;
|
|
70
|
+
}
|
|
71
|
+
|
|
72
|
+
/**
|
|
73
|
+
* Get all Python files in a specific domain.
|
|
74
|
+
*/
|
|
75
|
+
export function getPythonFilesInDomain(dataDb: Database.Database, domainName: string): string[] {
|
|
76
|
+
const allFiles = dataDb.prepare(
|
|
77
|
+
'SELECT DISTINCT source_file as f FROM massu_py_imports UNION SELECT DISTINCT target_file as f FROM massu_py_imports'
|
|
78
|
+
).all() as { f: string }[];
|
|
79
|
+
|
|
80
|
+
return allFiles
|
|
81
|
+
.map(row => row.f)
|
|
82
|
+
.filter(f => classifyPythonFileDomain(f) === domainName);
|
|
83
|
+
}
|
|
@@ -0,0 +1,95 @@
|
|
|
1
|
+
// Copyright (c) 2026 Massu. All rights reserved.
|
|
2
|
+
// Licensed under BSL 1.1 - see LICENSE file for details.
|
|
3
|
+
|
|
4
|
+
import type Database from 'better-sqlite3';
|
|
5
|
+
import { classifyPythonFileDomain } from './domain-enforcer.ts';
|
|
6
|
+
|
|
7
|
+
/** Result of a full impact analysis for a single Python file. */
export interface PythonImpactReport {
  /** The analyzed file (path relative to the project root). */
  file: string;
  /** Domain of the file, per python.domains config ('Unknown' if unmatched). */
  domain: string;
  /** Files importing this file, directly or transitively (bounded BFS). */
  importedBy: string[];
  /** HTTP routes whose handler functions are defined in this file. */
  routes: { method: string; path: string; functionName: string }[];
  /** ORM models defined in this file; tableName may be null. */
  models: { className: string; tableName: string | null }[];
  /** Frontend files that call routes defined in this file. */
  frontendCallers: string[];
  /** Outgoing imports that land in a different, known domain. */
  domainCrossings: { file: string; domain: string }[];
}
|
|
16
|
+
|
|
17
|
+
/**
|
|
18
|
+
* Full impact analysis for a Python file.
|
|
19
|
+
* Cross-references import graph, routes, models, and frontend coupling.
|
|
20
|
+
*/
|
|
21
|
+
export function analyzePythonImpact(dataDb: Database.Database, file: string): PythonImpactReport {
|
|
22
|
+
const domain = classifyPythonFileDomain(file);
|
|
23
|
+
|
|
24
|
+
// 1. Who imports this file (direct + transitive)
|
|
25
|
+
const importedBy = collectTransitiveImporters(dataDb, file, 5);
|
|
26
|
+
|
|
27
|
+
// 2. Routes defined in this file
|
|
28
|
+
const routes = dataDb.prepare(
|
|
29
|
+
'SELECT method, path, function_name FROM massu_py_routes WHERE file = ?'
|
|
30
|
+
).all(file) as { method: string; path: string; function_name: string }[];
|
|
31
|
+
|
|
32
|
+
// 3. Models defined in this file
|
|
33
|
+
const models = dataDb.prepare(
|
|
34
|
+
'SELECT class_name, table_name FROM massu_py_models WHERE file = ?'
|
|
35
|
+
).all(file) as { class_name: string; table_name: string | null }[];
|
|
36
|
+
|
|
37
|
+
// 4. Frontend callers (via routes in this file)
|
|
38
|
+
const routeIds = dataDb.prepare('SELECT id FROM massu_py_routes WHERE file = ?').all(file) as { id: number }[];
|
|
39
|
+
const frontendCallers: string[] = [];
|
|
40
|
+
if (routeIds.length > 0) {
|
|
41
|
+
const placeholders = routeIds.map(() => '?').join(',');
|
|
42
|
+
const callers = dataDb.prepare(
|
|
43
|
+
`SELECT DISTINCT frontend_file FROM massu_py_route_callers WHERE route_id IN (${placeholders})`
|
|
44
|
+
).all(...routeIds.map(r => r.id)) as { frontend_file: string }[];
|
|
45
|
+
frontendCallers.push(...callers.map(c => c.frontend_file));
|
|
46
|
+
}
|
|
47
|
+
|
|
48
|
+
// 5. Domain crossings
|
|
49
|
+
const imports = dataDb.prepare(
|
|
50
|
+
'SELECT target_file FROM massu_py_imports WHERE source_file = ?'
|
|
51
|
+
).all(file) as { target_file: string }[];
|
|
52
|
+
|
|
53
|
+
const domainCrossings = imports
|
|
54
|
+
.map(imp => ({ file: imp.target_file, domain: classifyPythonFileDomain(imp.target_file) }))
|
|
55
|
+
.filter(imp => imp.domain !== domain && imp.domain !== 'Unknown');
|
|
56
|
+
|
|
57
|
+
return {
|
|
58
|
+
file,
|
|
59
|
+
domain,
|
|
60
|
+
importedBy,
|
|
61
|
+
routes: routes.map(r => ({ method: r.method, path: r.path, functionName: r.function_name })),
|
|
62
|
+
models: models.map(m => ({ className: m.class_name, tableName: m.table_name })),
|
|
63
|
+
frontendCallers,
|
|
64
|
+
domainCrossings,
|
|
65
|
+
};
|
|
66
|
+
}
|
|
67
|
+
|
|
68
|
+
function collectTransitiveImporters(dataDb: Database.Database, file: string, maxDepth: number): string[] {
|
|
69
|
+
const visited = new Set<string>();
|
|
70
|
+
const queue = [file];
|
|
71
|
+
let depth = 0;
|
|
72
|
+
|
|
73
|
+
const importerStmt = dataDb.prepare(
|
|
74
|
+
'SELECT source_file FROM massu_py_imports WHERE target_file = ?'
|
|
75
|
+
);
|
|
76
|
+
|
|
77
|
+
while (queue.length > 0 && depth < maxDepth) {
|
|
78
|
+
const batch = [...queue];
|
|
79
|
+
queue.length = 0;
|
|
80
|
+
for (const f of batch) {
|
|
81
|
+
if (visited.has(f)) continue;
|
|
82
|
+
visited.add(f);
|
|
83
|
+
const importers = importerStmt.all(f) as { source_file: string }[];
|
|
84
|
+
for (const imp of importers) {
|
|
85
|
+
if (!visited.has(imp.source_file)) {
|
|
86
|
+
queue.push(imp.source_file);
|
|
87
|
+
}
|
|
88
|
+
}
|
|
89
|
+
}
|
|
90
|
+
depth++;
|
|
91
|
+
}
|
|
92
|
+
|
|
93
|
+
visited.delete(file); // Don't include the file itself
|
|
94
|
+
return [...visited];
|
|
95
|
+
}
|
|
@@ -0,0 +1,244 @@
|
|
|
1
|
+
// Copyright (c) 2026 Massu. All rights reserved.
|
|
2
|
+
// Licensed under BSL 1.1 - see LICENSE file for details.
|
|
3
|
+
|
|
4
|
+
// ============================================================
|
|
5
|
+
// Python Import Statement Parser
|
|
6
|
+
// ============================================================
|
|
7
|
+
|
|
8
|
+
/**
 * Represents a single parsed Python import statement.
 */
export interface PythonImport {
  /** The kind of import: plain absolute, plain relative, from-absolute, or from-relative. */
  type: 'absolute' | 'relative' | 'from_absolute' | 'from_relative';
  /** The module path, e.g. "os.path" or "..utils" (leading dots are kept for relative imports). */
  module: string;
  /** Imported names (empty for plain `import x` statements; contains '*' for star imports). */
  names: string[];
  /** Alias when `import x as alias` is used. Only set for plain imports; per-name `as` aliases inside from-imports are not recorded. */
  alias?: string;
  /** Relative import level: 0 for absolute, 1 for `.`, 2 for `..`, etc. */
  level: number;
  /** 1-based line number where the import statement begins. */
  line: number;
}
|
|
25
|
+
|
|
26
|
+
/**
 * Parse all Python import statements from source code.
 *
 * Handles:
 * - `import x`, `import x.y.z`, `import x as alias`, `import x, y, z`
 * - `from x import y`, `from x import y, z`, `from x import *`
 * - `from . import x`, `from ..x import y`, `from ...x.y import z`
 * - Multi-line parenthesized imports: `from x import (\n a,\n b\n)`
 * - Skips `if TYPE_CHECKING:` blocks
 * - Strips comments
 *
 * Implemented as a line-by-line state machine with three modes; order
 * of the checks below matters (TYPE_CHECKING exit falls through into
 * normal-mode processing of the dedented line).
 *
 * NOTE(review): comment stripping is textual; a `#` inside a string
 * literal on an import line would be misread — harmless for real
 * import syntax, which cannot contain strings.
 *
 * @param source - Python source code
 * @returns Array of parsed imports in source order
 */
export function parsePythonImports(source: string): PythonImport[] {
  const lines = source.split('\n');
  const results: PythonImport[] = [];

  /** State machine modes. */
  type Mode = 'normal' | 'multiline' | 'type_checking';
  let mode: Mode = 'normal';

  // Multiline accumulation state: the joined logical line and where it started.
  let multilineBuffer = '';
  let multilineStartLine = 0;

  // TYPE_CHECKING block tracking: indent column of the `if TYPE_CHECKING:` line.
  let typeCheckingIndent = -1;

  for (let i = 0; i < lines.length; i++) {
    const rawLine = lines[i];
    const lineNum = i + 1; // 1-based

    // ── TYPE_CHECKING block detection ──────────────────────
    if (mode === 'normal') {
      const tcMatch = rawLine.match(/^(\s*)if\s+TYPE_CHECKING\s*:/);
      if (tcMatch) {
        mode = 'type_checking';
        typeCheckingIndent = tcMatch[1].length;
        continue;
      }
    }

    if (mode === 'type_checking') {
      // Stay in type_checking mode until we see a line that is:
      // - non-empty, non-comment, and at the same or lesser indentation
      const stripped = rawLine.replace(/#.*$/, '').trimEnd();
      if (stripped.length === 0) continue; // blank or comment-only line
      const currentIndent = rawLine.match(/^(\s*)/)?.[1].length ?? 0;
      if (currentIndent <= typeCheckingIndent) {
        // Dedented — exit TYPE_CHECKING block, process this line normally
        mode = 'normal';
        typeCheckingIndent = -1;
        // Fall through to normal processing below
      } else {
        continue; // Still inside TYPE_CHECKING block
      }
    }

    // ── Multiline continuation ─────────────────────────────
    if (mode === 'multiline') {
      const cleaned = stripComment(rawLine);
      // Join continuation lines with a space; the buffer becomes one
      // logical `from ... import (...)` line for parseFromImportLine.
      multilineBuffer += ' ' + cleaned.trim();
      if (cleaned.includes(')')) {
        // Close the multiline import
        mode = 'normal';
        const parsed = parseFromImportLine(multilineBuffer, multilineStartLine);
        if (parsed) results.push(parsed);
        multilineBuffer = '';
      }
      continue;
    }

    // ── Normal mode ────────────────────────────────────────
    const line = stripComment(rawLine).trim();

    // Skip blank lines
    if (line.length === 0) continue;

    // Detect multiline from-import with opening paren but no closing paren
    if (line.startsWith('from ') && line.includes('(') && !line.includes(')')) {
      mode = 'multiline';
      multilineBuffer = line;
      multilineStartLine = lineNum;
      continue;
    }

    // from ... import ...
    if (line.startsWith('from ')) {
      const parsed = parseFromImportLine(line, lineNum);
      if (parsed) results.push(parsed);
      continue;
    }

    // import ...
    if (line.startsWith('import ')) {
      const imports = parsePlainImportLine(line, lineNum);
      results.push(...imports);
      continue;
    }
  }

  return results;
}
|
|
130
|
+
|
|
131
|
+
// ============================================================
|
|
132
|
+
// Internal helpers
|
|
133
|
+
// ============================================================
|
|
134
|
+
|
|
135
|
+
/**
|
|
136
|
+
* Strip an inline `# comment` from a line, respecting strings minimally.
|
|
137
|
+
* For import lines this is sufficient since import syntax doesn't contain `#`.
|
|
138
|
+
*/
|
|
139
|
+
function stripComment(line: string): string {
|
|
140
|
+
// If the line is a pure comment, return empty
|
|
141
|
+
const trimmed = line.trimStart();
|
|
142
|
+
if (trimmed.startsWith('#')) return '';
|
|
143
|
+
|
|
144
|
+
const hashIdx = line.indexOf('#');
|
|
145
|
+
if (hashIdx === -1) return line;
|
|
146
|
+
return line.slice(0, hashIdx);
|
|
147
|
+
}
|
|
148
|
+
|
|
149
|
+
/**
|
|
150
|
+
* Count leading dots in a module string and return the level + remaining module.
|
|
151
|
+
*/
|
|
152
|
+
function splitRelativePrefix(raw: string): { level: number; rest: string } {
|
|
153
|
+
let level = 0;
|
|
154
|
+
while (level < raw.length && raw[level] === '.') {
|
|
155
|
+
level++;
|
|
156
|
+
}
|
|
157
|
+
return { level, rest: raw.slice(level) };
|
|
158
|
+
}
|
|
159
|
+
|
|
160
|
+
/**
|
|
161
|
+
* Parse a `from X import Y` line (possibly with parentheses already joined).
|
|
162
|
+
*/
|
|
163
|
+
function parseFromImportLine(line: string, lineNum: number): PythonImport | null {
|
|
164
|
+
// Normalize: remove parens, collapse whitespace
|
|
165
|
+
const cleaned = line
|
|
166
|
+
.replace(/[()]/g, '')
|
|
167
|
+
.replace(/\s+/g, ' ')
|
|
168
|
+
.trim();
|
|
169
|
+
|
|
170
|
+
// Pattern: from <module> import <names>
|
|
171
|
+
const match = cleaned.match(/^from\s+(\S+)\s+import\s+(.+)$/);
|
|
172
|
+
if (!match) return null;
|
|
173
|
+
|
|
174
|
+
const rawModule = match[1];
|
|
175
|
+
const namesStr = match[2];
|
|
176
|
+
|
|
177
|
+
const { level, rest } = splitRelativePrefix(rawModule);
|
|
178
|
+
const isRelative = level > 0;
|
|
179
|
+
|
|
180
|
+
const module = rawModule;
|
|
181
|
+
|
|
182
|
+
// Parse names: split by comma, trim, handle `name as alias` per name
|
|
183
|
+
const names = namesStr
|
|
184
|
+
.split(',')
|
|
185
|
+
.map((n) => n.trim())
|
|
186
|
+
.filter((n) => n.length > 0)
|
|
187
|
+
.map((n) => {
|
|
188
|
+
// Handle `name as alias` — store just the name in the names array
|
|
189
|
+
const asMatch = n.match(/^(\S+)\s+as\s+(\S+)$/);
|
|
190
|
+
return asMatch ? asMatch[1] : n;
|
|
191
|
+
});
|
|
192
|
+
|
|
193
|
+
return {
|
|
194
|
+
type: isRelative ? 'from_relative' : 'from_absolute',
|
|
195
|
+
module,
|
|
196
|
+
names,
|
|
197
|
+
level,
|
|
198
|
+
line: lineNum,
|
|
199
|
+
};
|
|
200
|
+
}
|
|
201
|
+
|
|
202
|
+
/**
|
|
203
|
+
* Parse a plain `import X` line, which may contain multiple comma-separated modules.
|
|
204
|
+
*
|
|
205
|
+
* Examples:
|
|
206
|
+
* - `import os` → one result
|
|
207
|
+
* - `import os, sys, re` → three results
|
|
208
|
+
* - `import os.path as osp` → one result with alias
|
|
209
|
+
*/
|
|
210
|
+
function parsePlainImportLine(line: string, lineNum: number): PythonImport[] {
|
|
211
|
+
const results: PythonImport[] = [];
|
|
212
|
+
|
|
213
|
+
// Strip leading "import "
|
|
214
|
+
const rest = line.replace(/^import\s+/, '');
|
|
215
|
+
|
|
216
|
+
// Split by comma
|
|
217
|
+
const parts = rest.split(',').map((p) => p.trim()).filter((p) => p.length > 0);
|
|
218
|
+
|
|
219
|
+
for (const part of parts) {
|
|
220
|
+
// Check for `module as alias`
|
|
221
|
+
const asMatch = part.match(/^(\S+)\s+as\s+(\S+)$/);
|
|
222
|
+
const moduleName = asMatch ? asMatch[1] : part;
|
|
223
|
+
const alias = asMatch ? asMatch[2] : undefined;
|
|
224
|
+
|
|
225
|
+
const { level } = splitRelativePrefix(moduleName);
|
|
226
|
+
const isRelative = level > 0;
|
|
227
|
+
|
|
228
|
+
const imp: PythonImport = {
|
|
229
|
+
type: isRelative ? 'relative' : 'absolute',
|
|
230
|
+
module: moduleName,
|
|
231
|
+
names: [],
|
|
232
|
+
level,
|
|
233
|
+
line: lineNum,
|
|
234
|
+
};
|
|
235
|
+
|
|
236
|
+
if (alias !== undefined) {
|
|
237
|
+
imp.alias = alias;
|
|
238
|
+
}
|
|
239
|
+
|
|
240
|
+
results.push(imp);
|
|
241
|
+
}
|
|
242
|
+
|
|
243
|
+
return results;
|
|
244
|
+
}
|
|
@@ -0,0 +1,135 @@
|
|
|
1
|
+
// Copyright (c) 2026 Massu. All rights reserved.
|
|
2
|
+
// Licensed under BSL 1.1 - see LICENSE file for details.
|
|
3
|
+
|
|
4
|
+
import { readFileSync, existsSync, readdirSync } from 'fs';
|
|
5
|
+
import { resolve, join, relative, dirname } from 'path';
|
|
6
|
+
import type Database from 'better-sqlite3';
|
|
7
|
+
import { parsePythonImports } from './import-parser.ts';
|
|
8
|
+
import { getProjectRoot } from '../config.ts';
|
|
9
|
+
|
|
10
|
+
/**
|
|
11
|
+
* Resolve a Python module path to a file path.
|
|
12
|
+
* Checks both module.py and module/__init__.py.
|
|
13
|
+
* Returns path relative to project root, or null for external modules.
|
|
14
|
+
*/
|
|
15
|
+
export function resolvePythonModulePath(module: string, fromFile: string, pythonRoot: string, level: number): string | null {
|
|
16
|
+
const projectRoot = getProjectRoot();
|
|
17
|
+
|
|
18
|
+
if (level > 0) {
|
|
19
|
+
// Relative import - resolve from current file's directory
|
|
20
|
+
let baseDir = dirname(resolve(projectRoot, fromFile));
|
|
21
|
+
for (let i = 1; i < level; i++) {
|
|
22
|
+
baseDir = dirname(baseDir);
|
|
23
|
+
}
|
|
24
|
+
// Strip the dots prefix to get the actual module part
|
|
25
|
+
const modulePart = module.replace(/^\.+/, '');
|
|
26
|
+
if (modulePart) {
|
|
27
|
+
const parts = modulePart.split('.');
|
|
28
|
+
return tryResolvePythonPath(join(baseDir, ...parts), projectRoot);
|
|
29
|
+
}
|
|
30
|
+
// `from . import x` - the module is the current package
|
|
31
|
+
return tryResolvePythonPath(baseDir, projectRoot);
|
|
32
|
+
}
|
|
33
|
+
|
|
34
|
+
// Absolute import
|
|
35
|
+
const parts = module.split('.');
|
|
36
|
+
const candidate = join(resolve(projectRoot, pythonRoot), ...parts);
|
|
37
|
+
return tryResolvePythonPath(candidate, projectRoot);
|
|
38
|
+
}
|
|
39
|
+
|
|
40
|
+
function tryResolvePythonPath(basePath: string, projectRoot: string): string | null {
|
|
41
|
+
// Try as file: module.py
|
|
42
|
+
if (existsSync(basePath + '.py')) {
|
|
43
|
+
return relative(projectRoot, basePath + '.py');
|
|
44
|
+
}
|
|
45
|
+
// Try as package: module/__init__.py
|
|
46
|
+
if (existsSync(join(basePath, '__init__.py'))) {
|
|
47
|
+
return relative(projectRoot, join(basePath, '__init__.py'));
|
|
48
|
+
}
|
|
49
|
+
// Try exact path (already has .py)
|
|
50
|
+
if (basePath.endsWith('.py') && existsSync(basePath)) {
|
|
51
|
+
return relative(projectRoot, basePath);
|
|
52
|
+
}
|
|
53
|
+
return null;
|
|
54
|
+
}
|
|
55
|
+
|
|
56
|
+
/**
|
|
57
|
+
* Walk directory recursively, collecting .py files.
|
|
58
|
+
* Skips excluded directories.
|
|
59
|
+
*/
|
|
60
|
+
function walkPythonFiles(dir: string, excludeDirs: string[]): string[] {
|
|
61
|
+
const files: string[] = [];
|
|
62
|
+
try {
|
|
63
|
+
const entries = readdirSync(dir, { withFileTypes: true });
|
|
64
|
+
for (const entry of entries) {
|
|
65
|
+
if (entry.isDirectory()) {
|
|
66
|
+
if (excludeDirs.includes(entry.name)) continue;
|
|
67
|
+
files.push(...walkPythonFiles(join(dir, entry.name), excludeDirs));
|
|
68
|
+
} else if (entry.name.endsWith('.py')) {
|
|
69
|
+
files.push(join(dir, entry.name));
|
|
70
|
+
}
|
|
71
|
+
}
|
|
72
|
+
} catch { /* directory may not exist */ }
|
|
73
|
+
return files;
|
|
74
|
+
}
|
|
75
|
+
|
|
76
|
+
/**
|
|
77
|
+
* Build the Python import graph for all .py files under pythonRoot.
|
|
78
|
+
* Stores results in massu_py_imports table.
|
|
79
|
+
*/
|
|
80
|
+
export function buildPythonImportIndex(dataDb: Database.Database, pythonRoot: string, excludeDirs: string[] = ['__pycache__', '.venv', 'venv', '.mypy_cache', '.pytest_cache']): number {
|
|
81
|
+
const projectRoot = getProjectRoot();
|
|
82
|
+
const absRoot = resolve(projectRoot, pythonRoot);
|
|
83
|
+
|
|
84
|
+
// Clear existing Python import edges
|
|
85
|
+
dataDb.exec('DELETE FROM massu_py_imports');
|
|
86
|
+
|
|
87
|
+
const insertStmt = dataDb.prepare(
|
|
88
|
+
'INSERT INTO massu_py_imports (source_file, target_file, import_type, imported_names, line) VALUES (?, ?, ?, ?, ?)'
|
|
89
|
+
);
|
|
90
|
+
|
|
91
|
+
const files = walkPythonFiles(absRoot, excludeDirs);
|
|
92
|
+
let edgeCount = 0;
|
|
93
|
+
|
|
94
|
+
const insertMany = dataDb.transaction((edges: { source: string; target: string; type: string; names: string; line: number }[]) => {
|
|
95
|
+
for (const edge of edges) {
|
|
96
|
+
insertStmt.run(edge.source, edge.target, edge.type, edge.names, edge.line);
|
|
97
|
+
}
|
|
98
|
+
});
|
|
99
|
+
|
|
100
|
+
const batch: { source: string; target: string; type: string; names: string; line: number }[] = [];
|
|
101
|
+
|
|
102
|
+
for (const absFile of files) {
|
|
103
|
+
const relFile = relative(projectRoot, absFile);
|
|
104
|
+
let source: string;
|
|
105
|
+
try {
|
|
106
|
+
source = readFileSync(absFile, 'utf-8');
|
|
107
|
+
} catch { continue; }
|
|
108
|
+
|
|
109
|
+
const imports = parsePythonImports(source);
|
|
110
|
+
|
|
111
|
+
for (const imp of imports) {
|
|
112
|
+
const targetPath = resolvePythonModulePath(imp.module, relFile, pythonRoot, imp.level);
|
|
113
|
+
if (!targetPath) continue; // Skip external/stdlib
|
|
114
|
+
|
|
115
|
+
batch.push({
|
|
116
|
+
source: relFile,
|
|
117
|
+
target: targetPath,
|
|
118
|
+
type: imp.type,
|
|
119
|
+
names: JSON.stringify(imp.names),
|
|
120
|
+
line: imp.line,
|
|
121
|
+
});
|
|
122
|
+
edgeCount++;
|
|
123
|
+
|
|
124
|
+
if (batch.length >= 500) {
|
|
125
|
+
insertMany(batch.splice(0));
|
|
126
|
+
}
|
|
127
|
+
}
|
|
128
|
+
}
|
|
129
|
+
|
|
130
|
+
if (batch.length > 0) {
|
|
131
|
+
insertMany(batch);
|
|
132
|
+
}
|
|
133
|
+
|
|
134
|
+
return edgeCount;
|
|
135
|
+
}
|
|
@@ -0,0 +1,115 @@
|
|
|
1
|
+
// Copyright (c) 2026 Massu. All rights reserved.
|
|
2
|
+
// Licensed under BSL 1.1 - see LICENSE file for details.
|
|
3
|
+
|
|
4
|
+
import { readFileSync, readdirSync } from 'fs';
|
|
5
|
+
import { join, relative } from 'path';
|
|
6
|
+
import type Database from 'better-sqlite3';
|
|
7
|
+
import { parseAlembicMigration } from './migration-parser.ts';
|
|
8
|
+
import { getProjectRoot } from '../config.ts';
|
|
9
|
+
|
|
10
|
+
export function buildPythonMigrationIndex(dataDb: Database.Database, alembicDir: string): number {
|
|
11
|
+
const projectRoot = getProjectRoot();
|
|
12
|
+
const absDir = join(projectRoot, alembicDir);
|
|
13
|
+
dataDb.exec('DELETE FROM massu_py_migrations');
|
|
14
|
+
|
|
15
|
+
// Look for version files in versions/ subdirectory
|
|
16
|
+
const versionsDir = join(absDir, 'versions');
|
|
17
|
+
let files: string[] = [];
|
|
18
|
+
try {
|
|
19
|
+
files = readdirSync(versionsDir)
|
|
20
|
+
.filter(f => f.endsWith('.py'))
|
|
21
|
+
.map(f => join(versionsDir, f));
|
|
22
|
+
} catch { /* versions/ subdir not found, try parent */
|
|
23
|
+
try {
|
|
24
|
+
files = readdirSync(absDir)
|
|
25
|
+
.filter(f => f.endsWith('.py') && f !== 'env.py')
|
|
26
|
+
.map(f => join(absDir, f));
|
|
27
|
+
} catch { /* alembic dir not readable, skip */ }
|
|
28
|
+
}
|
|
29
|
+
|
|
30
|
+
const insertStmt = dataDb.prepare(
|
|
31
|
+
'INSERT INTO massu_py_migrations (revision, down_revision, file, description, operations, is_head) VALUES (?, ?, ?, ?, ?, ?)'
|
|
32
|
+
);
|
|
33
|
+
|
|
34
|
+
let count = 0;
|
|
35
|
+
const allRevisions: Set<string> = new Set();
|
|
36
|
+
const hasDownRef: Set<string> = new Set();
|
|
37
|
+
|
|
38
|
+
// First pass: parse all migrations
|
|
39
|
+
interface MigRow { revision: string; downRevision: string | null; file: string; description: string | null; operations: string }
|
|
40
|
+
const rows: MigRow[] = [];
|
|
41
|
+
|
|
42
|
+
for (const absFile of files) {
|
|
43
|
+
let source: string;
|
|
44
|
+
try { source = readFileSync(absFile, 'utf-8'); } catch { continue; }
|
|
45
|
+
|
|
46
|
+
const parsed = parseAlembicMigration(source);
|
|
47
|
+
if (!parsed.revision) continue;
|
|
48
|
+
|
|
49
|
+
allRevisions.add(parsed.revision);
|
|
50
|
+
if (parsed.downRevision) hasDownRef.add(parsed.downRevision);
|
|
51
|
+
|
|
52
|
+
rows.push({
|
|
53
|
+
revision: parsed.revision,
|
|
54
|
+
downRevision: parsed.downRevision,
|
|
55
|
+
file: relative(projectRoot, absFile),
|
|
56
|
+
description: parsed.description,
|
|
57
|
+
operations: JSON.stringify(parsed.operations),
|
|
58
|
+
});
|
|
59
|
+
}
|
|
60
|
+
|
|
61
|
+
// Determine heads (revisions not referenced as down_revision by anyone)
|
|
62
|
+
dataDb.transaction(() => {
|
|
63
|
+
for (const row of rows) {
|
|
64
|
+
const isHead = !hasDownRef.has(row.revision) ? 1 : 0;
|
|
65
|
+
insertStmt.run(row.revision, row.downRevision, row.file, row.description, row.operations, isHead);
|
|
66
|
+
count++;
|
|
67
|
+
}
|
|
68
|
+
})();
|
|
69
|
+
|
|
70
|
+
return count;
|
|
71
|
+
}
|
|
72
|
+
|
|
73
|
+
/**
 * Detect drift between SQLAlchemy models and migration state.
 */
export interface DriftReport {
  /** Models whose table name never appears in any migration operation. */
  unmigratedModels: { className: string; tableName: string }[];
  /** Model columns absent from all migrations. NOTE(review): never populated by detectMigrationDrift — always empty. */
  missingColumns: { model: string; column: string }[];
  /** Migrations with no corresponding model. NOTE(review): never populated by detectMigrationDrift — always empty. */
  extraMigrations: string[];
}
|
|
81
|
+
|
|
82
|
+
/**
 * Compare indexed SQLAlchemy models against indexed migration operations
 * and report models whose table is never touched by any migration.
 *
 * NOTE(review): only table-level drift is implemented. `migratedColumns`
 * is built but never consulted, and `missingColumns` / `extraMigrations`
 * are always returned empty — column-level drift detection appears to be
 * unfinished (the model `columns` JSON is selected but never parsed).
 */
export function detectMigrationDrift(dataDb: Database.Database): DriftReport {
  // Models that map to a concrete table (abstract models have NULL table_name).
  const models = dataDb.prepare('SELECT class_name, table_name, columns FROM massu_py_models WHERE table_name IS NOT NULL').all() as {
    class_name: string; table_name: string; columns: string;
  }[];

  const migrations = dataDb.prepare('SELECT operations FROM massu_py_migrations').all() as { operations: string }[];

  // Collect all tables and columns mentioned in migrations
  const migratedTables = new Set<string>();
  const migratedColumns = new Map<string, Set<string>>();

  for (const mig of migrations) {
    // operations is stored as a JSON array; malformed rows count as no ops.
    let ops: { table?: string; column?: string }[];
    try { ops = JSON.parse(mig.operations); } catch { ops = []; }
    for (const op of ops) {
      if (op.table) {
        migratedTables.add(op.table);
        if (!migratedColumns.has(op.table)) migratedColumns.set(op.table, new Set());
        if (op.column) migratedColumns.get(op.table)!.add(op.column);
      }
    }
  }

  const unmigratedModels: DriftReport['unmigratedModels'] = [];
  const missingColumns: DriftReport['missingColumns'] = [];

  // A model is unmigrated when no migration operation ever touched its table.
  for (const model of models) {
    if (!migratedTables.has(model.table_name)) {
      unmigratedModels.push({ className: model.class_name, tableName: model.table_name });
    }
  }

  return { unmigratedModels, missingColumns, extraMigrations: [] };
}
|