invar-tools 1.17.12-py3-none-any.whl → 1.17.21-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- invar/mcp/handlers.py +58 -2
- invar/node_tools/eslint-plugin/cli.js +105 -31
- invar/node_tools/eslint-plugin/rules/require-schema-validation.js +80 -66
- invar/shell/commands/guard.py +46 -6
- invar/shell/config.py +38 -5
- invar/shell/git.py +10 -11
- invar/shell/guard_helpers.py +69 -41
- invar/shell/property_tests.py +85 -38
- invar/shell/prove/crosshair.py +19 -12
- invar/shell/prove/guard_ts.py +39 -17
- invar/shell/subprocess_env.py +58 -5
- invar/shell/testing.py +59 -31
- {invar_tools-1.17.12.dist-info → invar_tools-1.17.21.dist-info}/METADATA +3 -3
- {invar_tools-1.17.12.dist-info → invar_tools-1.17.21.dist-info}/RECORD +19 -19
- {invar_tools-1.17.12.dist-info → invar_tools-1.17.21.dist-info}/WHEEL +0 -0
- {invar_tools-1.17.12.dist-info → invar_tools-1.17.21.dist-info}/entry_points.txt +0 -0
- {invar_tools-1.17.12.dist-info → invar_tools-1.17.21.dist-info}/licenses/LICENSE +0 -0
- {invar_tools-1.17.12.dist-info → invar_tools-1.17.21.dist-info}/licenses/LICENSE-GPL +0 -0
- {invar_tools-1.17.12.dist-info → invar_tools-1.17.21.dist-info}/licenses/NOTICE +0 -0
invar/mcp/handlers.py
CHANGED
@@ -427,11 +427,24 @@ async def _execute_command(
             timeout=timeout,
         )

+        stdout = result.stdout.strip()
+
+        # Try to parse as JSON
         try:
-            parsed = json.loads(
+            parsed = json.loads(stdout)
             return ([TextContent(type="text", text=json.dumps(parsed, indent=2))], parsed)
         except json.JSONDecodeError:
-
+            # Try to fix unescaped newlines in JSON strings
+            # Guard/map commands may output multiline JSON with literal newlines
+            fixed = _fix_json_newlines(stdout)
+            try:
+                parsed = json.loads(fixed)
+                return ([TextContent(type="text", text=json.dumps(parsed, indent=2))], parsed)
+            except json.JSONDecodeError:
+                pass
+
+            # Fall back to text output
+            output = stdout
         if result.stderr:
             output += f"\n\nStderr:\n{result.stderr}"
         return [TextContent(type="text", text=output)]
@@ -440,3 +453,46 @@ async def _execute_command(
         return [TextContent(type="text", text=f"Error: Command timed out ({timeout}s)")]
     except Exception as e:
         return [TextContent(type="text", text=f"Error: {e}")]
+
+
+# @invar:allow shell_too_complex: Simple state machine, 6 branches is minimal
+# @invar:allow shell_pure_logic: No I/O, but called from shell context
+# @invar:allow shell_result: Pure transformation, returns str not Result
+def _fix_json_newlines(text: str) -> str:
+    """Fix unescaped newlines in JSON strings.
+
+    When subprocess outputs multiline JSON, newlines inside string values
+    are not escaped, causing json.loads() to fail. This function escapes them.
+
+    DX-33: Escape hatch for complex pure logic helper.
+    """
+    result = []
+    i = 0
+    while i < len(text):
+        if text[i] == '"':
+            # Inside a string - collect until closing quote
+            result.append('"')
+            i += 1
+            while i < len(text):
+                c = text[i]
+                if c == "\\" and i + 1 < len(text):
+                    # Escaped character - keep as is
+                    result.append("\\")
+                    result.append(text[i + 1])
+                    i += 2
+                elif c == '"':
+                    # End of string
+                    result.append('"')
+                    i += 1
+                    break
+                elif c == "\n" or c == "\r":
+                    # Unescaped newline - escape it
+                    result.append("\\n")
+                    i += 1
+                else:
+                    result.append(c)
+                    i += 1
+        else:
+            result.append(text[i])
+            i += 1
+    return "".join(result)
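For context, a minimal self-contained sketch of the failure mode the new _fix_json_newlines helper works around. The helper escapes newlines only inside JSON string literals; this sketch escapes them globally, which is only safe when no newlines occur outside strings:

    import json

    raw = '{"summary": "line one\nline two"}'  # literal newline inside a string value

    try:
        json.loads(raw)
    except json.JSONDecodeError:
        pass  # strict JSON rejects unescaped control characters inside strings

    fixed = raw.replace("\n", "\\n")  # naive global escape; the real helper is string-aware
    assert json.loads(fixed)["summary"] == "line one\nline two"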
invar/node_tools/eslint-plugin/cli.js
CHANGED

@@ -16,11 +16,61 @@
 import { ESLint } from 'eslint';
 import { resolve, dirname } from 'path';
 import { statSync, realpathSync } from 'fs';
+import { spawnSync } from 'child_process';
 import { fileURLToPath } from 'url';
+import { createRequire } from 'module';
 import plugin from './index.js';
 // Get the directory where this CLI script is located (embedded in site-packages)
 const __filename = fileURLToPath(import.meta.url);
 const __dirname = dirname(__filename);
+
+const require = createRequire(import.meta.url);
+
+function resolveTsParser(projectPath) {
+  try {
+    const tseslintEntry = require.resolve('typescript-eslint', { paths: [projectPath] });
+    if (tseslintEntry) {
+      const tseslintRoot = dirname(dirname(tseslintEntry));
+      return require.resolve('@typescript-eslint/parser', { paths: [tseslintRoot] });
+    }
+  }
+  catch {
+  }
+
+  try {
+    return require.resolve('@typescript-eslint/parser', { paths: [projectPath] });
+  }
+  catch {
+  }
+
+  try {
+    return require.resolve('@typescript-eslint/parser', { paths: [__dirname] });
+  }
+  catch {
+    return null;
+  }
+}
+function _gitLsFiles(projectPath) {
+  const check = spawnSync('git', ['-C', projectPath, 'rev-parse', '--is-inside-work-tree'], {
+    encoding: 'utf8',
+    timeout: 2000,
+  });
+  if (check.status !== 0) {
+    return null;
+  }
+
+  const ls = spawnSync('git', ['-C', projectPath, 'ls-files', '-z', '--', '*.ts', '*.tsx'], {
+    encoding: 'utf8',
+    timeout: 15000,
+  });
+  if (ls.status !== 0 || !ls.stdout) {
+    return null;
+  }
+
+  const files = ls.stdout.split('\0').filter(Boolean);
+  return files.length > 0 ? files : null;
+}
+
 function parseArgs(args) {
   const projectPath = args.find(arg => !arg.startsWith('--')) || '.';
   const configArg = args.find(arg => arg.startsWith('--config='));
@@ -93,44 +143,35 @@ async function main() {
     console.error(`Config "${args.config}" not found or invalid`);
     process.exit(1);
   }
-
-
-
-
-
-
-
-    baseConfig: {
-      parser: '@typescript-eslint/parser', // Will resolve from __dirname/node_modules
-      parserOptions: {
-        ecmaVersion: 2022,
-        sourceType: 'module',
-      },
-      plugins: ['@invar'],
-      rules: selectedConfig.rules,
-    },
-    plugins: {
-      '@invar': plugin, // Register plugin directly
-    },
-  }); // Type assertion for ESLint config complexity
-  // Lint the project - detect if path is a file or directory
-  // ESLint defaults to .js only, so we need glob patterns for .ts/.tsx
+  const tsParser = resolveTsParser(projectPath);
+  if (!tsParser) {
+    console.error("ESLint failed: Failed to load TypeScript parser.");
+    console.error("Install either 'typescript-eslint' or '@typescript-eslint/parser' in your project.");
+    process.exit(1);
+  }
+
   let filesToLint;
+  let lintCwd = projectPath;
+  let globInputPaths = true;
   try {
     const stats = statSync(projectPath);
-    // Note: Advisory check for optimization - TOCTOU race condition is acceptable
-    // because ESLint will handle file system changes gracefully during actual linting
     if (stats.isFile()) {
-
+      lintCwd = dirname(projectPath);
       filesToLint = [projectPath];
+      globInputPaths = false;
     }
     else if (stats.isDirectory()) {
-
-
-
-
-
-
+      const gitFiles = _gitLsFiles(projectPath);
+      if (gitFiles) {
+        filesToLint = gitFiles;
+        globInputPaths = false;
+      }
+      else {
+        filesToLint = [
+          "**/*.ts",
+          "**/*.tsx",
+        ];
+      }
     }
     else {
       console.error(`Error: Path is neither a file nor a directory: ${projectPath}`);
@@ -142,6 +183,39 @@ async function main() {
     console.error(`Error: Cannot access path: ${errorMessage}`);
     process.exit(1);
   }
+
+  const eslint = new ESLint({
+    useEslintrc: false,
+    cwd: lintCwd,
+    resolvePluginsRelativeTo: __dirname,
+    errorOnUnmatchedPattern: false,
+    globInputPaths,
+    baseConfig: {
+      parser: tsParser,
+      parserOptions: {
+        ecmaVersion: 2022,
+        sourceType: 'module',
+      },
+      plugins: ['@invar'],
+      rules: selectedConfig.rules,
+      ignorePatterns: [
+        '**/node_modules/**',
+        '**/.next/**',
+        '**/dist/**',
+        '**/build/**',
+        '**/.cache/**',
+        '**/coverage/**',
+        '**/.turbo/**',
+        '**/.vercel/**',
+        '**/playwright-report/**',
+        '**/test-results/**',
+      ],
+    },
+    plugins: {
+      '@invar': plugin, // Register plugin directly
+    },
+  });
+
   const results = await eslint.lintFiles(filesToLint);
   // Output in standard ESLint JSON format (compatible with guard_ts.py)
   const formatter = await eslint.loadFormatter('json');
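The new _gitLsFiles helper prefers git-tracked *.ts/*.tsx files over glob discovery, so vendored and build output stay out of the lint run even before the ignorePatterns kick in. The same NUL-delimited listing pattern, sketched in Python purely for illustration (not part of the package):

    import subprocess

    def git_tracked_ts_files(project_path: str) -> list[str] | None:
        """Return git-tracked .ts/.tsx paths, or None when not inside a git work tree."""
        check = subprocess.run(
            ["git", "-C", project_path, "rev-parse", "--is-inside-work-tree"],
            capture_output=True, text=True, timeout=2,
        )
        if check.returncode != 0:
            return None
        ls = subprocess.run(
            ["git", "-C", project_path, "ls-files", "-z", "--", "*.ts", "*.tsx"],
            capture_output=True, text=True, timeout=15,
        )
        if ls.returncode != 0 or not ls.stdout:
            return None
        files = [f for f in ls.stdout.split("\0") if f]  # -z output is NUL-separated
        return files or None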
invar/node_tools/eslint-plugin/rules/require-schema-validation.js
CHANGED

@@ -77,49 +77,58 @@ function matchesEnforcePattern(filePath, patterns) {
 function isZodType(typeAnnotation) {
   return ZOD_TYPE_PATTERNS.some(pattern => pattern.test(typeAnnotation));
 }
-function
+function collectParseArgs(body, visitorKeys) {
+  const parsed = new Set();
   if (!body)
-    return
-
-  const MAX_DEPTH = 50;
-  const
-
-
+    return parsed;
+
+  const MAX_DEPTH = 50;
+  const stack = [{ node: body, depth: 0 }];
+
+  while (stack.length > 0) {
+    const current = stack.pop();
+    if (!current)
+      continue;
+    const node = current.node;
+    const depth = current.depth;
+    if (!node || typeof node !== 'object')
+      continue;
     if (depth > MAX_DEPTH)
-
+      continue;
+
     if (node.type === 'CallExpression') {
       const callee = node.callee;
-      if (callee.type === 'MemberExpression') {
+      if (callee && callee.type === 'MemberExpression') {
         const property = callee.property;
-        if (property.type === 'Identifier' &&
-          (
-
-
-
-          return;
+        if (property && property.type === 'Identifier' && (property.name === 'parse' || property.name === 'safeParse')) {
+          for (const arg of node.arguments || []) {
+            if (arg && arg.type === 'Identifier') {
+              parsed.add(arg.name);
+            }
          }
        }
      }
    }
-
-
+
+    const keys = (visitorKeys && node.type && visitorKeys[node.type]) || [];
+    for (const key of keys) {
      const value = node[key];
-      if (value
-
-
-
-
-      }
+      if (!value)
+        continue;
+      if (Array.isArray(value)) {
+        for (const item of value) {
+          if (item && typeof item === 'object' && item.type) {
+            stack.push({ node: item, depth: depth + 1 });
          }
        }
-
-
-      }
+      }
+      else if (typeof value === 'object' && value.type) {
+        stack.push({ node: value, depth: depth + 1 });
      }
    }
-  }
-
-  return
+  }
+
+  return parsed;
 }
 export const requireSchemaValidation = {
   meta: {
@@ -206,49 +215,54 @@ export const requireSchemaValidation = {
     }
     function checkFunction(node, params) {
       const functionName = getFunctionName(node);
-      // Skip if shouldn't check based on mode
       if (!shouldCheck(functionName)) {
         return;
       }
+
       const body = 'body' in node ? node.body : null;
+      const zodParams = params.filter((p) => p.typeAnnotation && isZodType(p.typeAnnotation) && p.name && p.name !== '{...}' && p.name !== '[...]');
+      if (zodParams.length === 0) {
+        return;
+      }
+
+      const parsedArgs = collectParseArgs(body, sourceCode.visitorKeys);
       const isRiskFunction = isHighRiskFunction(functionName, filename);
-
-
-
-
-        const schemaMatch = param.typeAnnotation.match(/typeof\s+(\w+)/);
-        const schemaName = schemaMatch ? schemaMatch[1] : 'Schema';
-        const validatedVarName = `validated${param.name.charAt(0).toUpperCase()}${param.name.slice(1)}`;
-        context.report({
-          node: node,
-          messageId: isRiskFunction ? 'missingValidationRisk' : 'missingValidation',
-          data: {
-            name: param.name,
-            functionName: functionName,
-          },
-          suggest: [
-            {
-              messageId: 'addParseCall',
-              data: { name: param.name },
-              fix(fixer) {
-                // Find the opening brace of the function body
-                if (!body || body.type !== 'BlockStatement')
-                  return null;
-                const blockBody = body;
-                if (!blockBody.body || blockBody.body.length === 0)
-                  return null;
-                const firstStatement = blockBody.body[0];
-                // Detect indentation from the first statement
-                const firstStatementStart = firstStatement.loc?.start.column ?? 2;
-                const indent = ' '.repeat(firstStatementStart);
-                const parseCode = `const ${validatedVarName} = ${schemaName}.parse(${param.name});\n${indent}`;
-                return fixer.insertTextBefore(firstStatement, parseCode);
-              },
-            },
-          ],
-        });
-      }
+
+      for (const param of zodParams) {
+        if (parsedArgs.has(param.name)) {
+          continue;
        }
+
+        const schemaMatch = param.typeAnnotation.match(/typeof\s+(\w+)/);
+        const schemaName = schemaMatch ? schemaMatch[1] : 'Schema';
+        const validatedVarName = `validated${param.name.charAt(0).toUpperCase()}${param.name.slice(1)}`;
+
+        context.report({
+          node: node,
+          messageId: isRiskFunction ? 'missingValidationRisk' : 'missingValidation',
+          data: {
+            name: param.name,
+            functionName: functionName,
+          },
+          suggest: [
+            {
+              messageId: 'addParseCall',
+              data: { name: param.name },
+              fix(fixer) {
+                if (!body || body.type !== 'BlockStatement')
+                  return null;
+                const blockBody = body;
+                if (!blockBody.body || blockBody.body.length === 0)
+                  return null;
+                const firstStatement = blockBody.body[0];
+                const firstStatementStart = firstStatement.loc?.start.column ?? 2;
+                const indent = ' '.repeat(firstStatementStart);
+                const parseCode = `const ${validatedVarName} = ${schemaName}.parse(${param.name});\n${indent}`;
+                return fixer.insertTextBefore(firstStatement, parseCode);
+              },
+            },
+          ],
+        });
       }
     }
     /**
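The rewritten collectParseArgs walks the function body with an explicit, depth-capped stack instead of recursion and records which identifiers are passed to .parse()/.safeParse(). The rule itself operates on ESLint's ESTree nodes via visitorKeys; here is the same traversal technique sketched against Python's own ast module, purely to illustrate the idea (names and the 50-depth cap mirror the rule, nothing here ships in the package):

    import ast

    def collect_parse_args(source: str, max_depth: int = 50) -> set[str]:
        """Names passed to .parse()/.safeParse() calls, found with an explicit,
        depth-capped stack rather than recursion."""
        found: set[str] = set()
        stack = [(ast.parse(source), 0)]
        while stack:
            node, depth = stack.pop()
            if depth > max_depth:
                continue
            if (isinstance(node, ast.Call)
                    and isinstance(node.func, ast.Attribute)
                    and node.func.attr in {"parse", "safeParse"}):
                for arg in node.args:
                    if isinstance(arg, ast.Name):
                        found.add(arg.id)
            for child in ast.iter_child_nodes(node):
                stack.append((child, depth + 1))
        return found

    assert collect_parse_args("schema.parse(payload)") == {"payload"}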
invar/shell/commands/guard.py
CHANGED
@@ -6,6 +6,8 @@ Shell module: handles user interaction and file I/O.

 from __future__ import annotations

+import os
+import sys
 from pathlib import Path

 import typer
@@ -17,7 +19,7 @@ from invar import __version__
 from invar.core.models import GuardReport, RuleConfig
 from invar.core.rules import check_all_rules
 from invar.core.utils import get_exit_code
-from invar.shell.config import find_project_root, load_config
+from invar.shell.config import find_project_root, find_pyproject_root, load_config
 from invar.shell.fs import scan_project
 from invar.shell.guard_output import output_agent, output_rich

@@ -190,12 +192,28 @@ def guard(
         ts_result = run_typescript_guard(path if path.is_dir() else find_project_root(path))
         match ts_result:
             case Success(result):
-
+                if human:
+                    # Human-readable Rich output
+                    from invar.shell.prove.guard_ts import format_typescript_guard_v2
+
+                    output = format_typescript_guard_v2(result)
+                    console.print(f"[bold]TypeScript Guard[/bold] ({project_language})")
+                    if result.status == "passed":
+                        console.print("[green]✓ PASSED[/green]")
+                    elif result.status == "skipped":
+                        console.print("[yellow]⚠ SKIPPED[/yellow] (no TypeScript tools available)")
+                    else:
+                        console.print(f"[red]✗ FAILED[/red] ({result.error_count} errors)")
+                        for v in result.violations[:10]:  # Show first 10
+                            console.print(f" {v.file}:{v.line}: [{v.severity}] {v.message}")
+                else:
+                    # JSON output for agents
+                    import json as json_mod

-
+                    from invar.shell.prove.guard_ts import format_typescript_guard_v2

-
-
+                    output = format_typescript_guard_v2(result)
+                    console.print(json_mod.dumps(output, indent=2))
                 raise typer.Exit(0 if result.status == "passed" else 1)
             case Failure(err):
                 console.print(f"[red]Error:[/red] {err}")
@@ -209,7 +227,27 @@ def guard(
         console.print(f"[red]Error:[/red] {path} is not a Python file")
         raise typer.Exit(1)
     single_file = path.resolve()
-
+
+    pyproject_root = find_pyproject_root(single_file if single_file else path)
+    if pyproject_root is None:
+        console.print(
+            "[red]Error:[/red] pyproject.toml not found (searched upward from the target path)"
+        )
+        raise typer.Exit(1)
+    path = pyproject_root
+
+    from invar.shell.subprocess_env import get_uvx_respawn_command
+
+    cmd = get_uvx_respawn_command(
+        project_root=path,
+        argv=sys.argv[1:],
+        tool_name=Path(sys.argv[0]).name,
+        invar_tools_version=__version__,
+    )
+    if cmd is not None:
+        env = os.environ.copy()
+        env["INVAR_UVX_RESPAWNED"] = "1"
+        os.execvpe(cmd[0], cmd, env)

     # Load and configure
     config_result = load_config(path)
@@ -357,6 +395,7 @@ def guard(

     # Phase 1: Doctests (DX-37: with optional coverage)
     doctest_passed, doctest_output, doctest_coverage = run_doctests_phase(
+        path,
         checked_files,
         explain,
         timeout=config.timeout_doctest,
@@ -377,6 +416,7 @@ def guard(

     # Phase 3: Hypothesis property tests (DX-37: with optional coverage)
     property_passed, property_output, property_coverage = run_property_tests_phase(
+        path,
         checked_files,
         doctest_passed,
         static_exit_code,
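The new respawn block replaces the current process via os.execvpe and sets INVAR_UVX_RESPAWNED so the re-executed command does not loop. The decision of whether and how to respawn lives in invar.shell.subprocess_env.get_uvx_respawn_command (also changed in this release); the sketch below is only the exec-with-marker pattern, with the loop guard folded in locally as an assumption for clarity:

    import os

    def respawn_once(cmd: list[str] | None) -> None:
        """Replace the current process with cmd at most once.

        Assumption: the real loop guard may live inside get_uvx_respawn_command;
        here it is checked inline via the INVAR_UVX_RESPAWNED marker.
        """
        if cmd is None or os.environ.get("INVAR_UVX_RESPAWNED") == "1":
            return
        env = os.environ.copy()
        env["INVAR_UVX_RESPAWNED"] = "1"
        os.execvpe(cmd[0], cmd, env)  # does not return if the exec succeeds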
invar/shell/config.py
CHANGED
@@ -39,11 +39,27 @@ class ModuleType(Enum):


 # I/O libraries that indicate Shell module (for AST import checking)
-_IO_LIBRARIES = frozenset(
-
-
-
-
+_IO_LIBRARIES = frozenset(
+    [
+        "os",
+        "sys",
+        "subprocess",
+        "pathlib",
+        "shutil",
+        "io",
+        "socket",
+        "requests",
+        "aiohttp",
+        "httpx",
+        "urllib",
+        "sqlite3",
+        "psycopg2",
+        "pymongo",
+        "sqlalchemy",
+        "typer",
+        "click",
+    ]
+)

 # Contract decorator names
 _CONTRACT_DECORATORS = frozenset(["pre", "post", "invariant"])
@@ -226,6 +242,7 @@ def auto_detect_module_type(source: str, file_path: str = "") -> ModuleType:
     # Unknown: neither clear pattern
     return ModuleType.UNKNOWN

+
 if TYPE_CHECKING:
     from pathlib import Path

@@ -268,6 +285,20 @@ def _find_config_source(project_root: Path) -> Result[tuple[Path | None, ConfigS


 # @shell_complexity: Project root discovery requires checking multiple markers
+def find_pyproject_root(start_path: "Path") -> "Path | None":  # noqa: UP037
+    from pathlib import Path
+
+    current = Path(start_path).resolve()
+    if current.is_file():
+        current = current.parent
+
+    for parent in [current, *current.parents]:
+        if (parent / "pyproject.toml").exists():
+            return parent
+
+    return None
+
+
 def find_project_root(start_path: "Path") -> "Path":  # noqa: UP037
     """
     Find project root by walking up from start_path looking for config files.
@@ -492,6 +523,7 @@ def classify_file(
     else:
         # Log warning about config error, use defaults
         import logging
+
         logging.getLogger(__name__).debug(
             "Pattern classification failed: %s, using defaults", pattern_result.failure()
         )
@@ -503,6 +535,7 @@ def classify_file(
     else:
         # Log warning about config error, use defaults
         import logging
+
         logging.getLogger(__name__).debug(
             "Path classification failed: %s, using defaults", path_result.failure()
         )
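A short usage sketch of the new find_pyproject_root helper, which walks upward until it finds a directory containing pyproject.toml and returns None otherwise. Assumes invar-tools >= 1.17.21 is installed; the throwaway directory layout is invented for the example:

    import tempfile
    from pathlib import Path

    from invar.shell.config import find_pyproject_root

    with tempfile.TemporaryDirectory() as tmp:
        root = Path(tmp).resolve()
        (root / "pyproject.toml").touch()
        pkg = root / "src" / "pkg"
        pkg.mkdir(parents=True)
        # Walks upward from src/pkg until it reaches the directory holding pyproject.toml.
        assert find_pyproject_root(pkg) == root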
invar/shell/git.py
CHANGED
@@ -7,13 +7,10 @@ Shell module: handles git I/O for changed file detection.
 from __future__ import annotations

 import subprocess
-from
+from pathlib import Path

 from returns.result import Failure, Result, Success

-if TYPE_CHECKING:
-    from pathlib import Path
-

 def _run_git(args: list[str], cwd: Path) -> Result[str, str]:
     """Run a git command and return stdout."""
@@ -49,27 +46,29 @@ def get_changed_files(project_root: Path) -> Result[set[Path], str]:
     >>> isinstance(result, (Success, Failure))
     True
     """
-    # Verify git repo
     check = _run_git(["rev-parse", "--git-dir"], project_root)
     if isinstance(check, Failure):
         return Failure(f"Not a git repository: {project_root}")

+    repo_root_result = _run_git(["rev-parse", "--show-toplevel"], project_root)
+    if isinstance(repo_root_result, Failure):
+        return Failure(repo_root_result.failure())
+
+    repo_root = Path(repo_root_result.unwrap().strip())
+
     changed: set[Path] = set()

-    # Staged changes
     staged = _run_git(["diff", "--cached", "--name-only"], project_root)
     if isinstance(staged, Success):
-        changed.update(_parse_py_files(staged.unwrap(),
+        changed.update(_parse_py_files(staged.unwrap(), repo_root))

-    # Unstaged changes
     unstaged = _run_git(["diff", "--name-only"], project_root)
     if isinstance(unstaged, Success):
-        changed.update(_parse_py_files(unstaged.unwrap(),
+        changed.update(_parse_py_files(unstaged.unwrap(), repo_root))

-    # Untracked files
     untracked = _run_git(["ls-files", "--others", "--exclude-standard"], project_root)
     if isinstance(untracked, Success):
-        changed.update(_parse_py_files(untracked.unwrap(),
+        changed.update(_parse_py_files(untracked.unwrap(), repo_root))

     return Success(changed)

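The change above anchors git-reported paths to the repository toplevel: git prints file paths relative to the repo root, not to a project root that may sit in a subdirectory of the repo. A standalone sketch of the corrected lookup, for illustration only (not the package's code):

    import subprocess
    from pathlib import Path

    def changed_python_files(project_root: Path) -> set[Path]:
        """Join git-reported paths to `--show-toplevel`, not to project_root,
        because git always reports paths relative to the repository root."""
        repo_root = Path(subprocess.run(
            ["git", "-C", str(project_root), "rev-parse", "--show-toplevel"],
            capture_output=True, text=True, check=True,
        ).stdout.strip())
        names = subprocess.run(
            ["git", "-C", str(project_root), "diff", "--name-only"],
            capture_output=True, text=True, check=True,
        ).stdout.splitlines()
        return {repo_root / n for n in names if n.endswith(".py")}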