invar_tools-1.0.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (64)
  1. invar/__init__.py +68 -0
  2. invar/contracts.py +152 -0
  3. invar/core/__init__.py +8 -0
  4. invar/core/contracts.py +375 -0
  5. invar/core/extraction.py +172 -0
  6. invar/core/formatter.py +281 -0
  7. invar/core/hypothesis_strategies.py +454 -0
  8. invar/core/inspect.py +154 -0
  9. invar/core/lambda_helpers.py +190 -0
  10. invar/core/models.py +289 -0
  11. invar/core/must_use.py +172 -0
  12. invar/core/parser.py +276 -0
  13. invar/core/property_gen.py +383 -0
  14. invar/core/purity.py +369 -0
  15. invar/core/purity_heuristics.py +184 -0
  16. invar/core/references.py +180 -0
  17. invar/core/rule_meta.py +203 -0
  18. invar/core/rules.py +435 -0
  19. invar/core/strategies.py +267 -0
  20. invar/core/suggestions.py +324 -0
  21. invar/core/tautology.py +137 -0
  22. invar/core/timeout_inference.py +114 -0
  23. invar/core/utils.py +364 -0
  24. invar/decorators.py +94 -0
  25. invar/invariant.py +57 -0
  26. invar/mcp/__init__.py +10 -0
  27. invar/mcp/__main__.py +13 -0
  28. invar/mcp/server.py +251 -0
  29. invar/py.typed +0 -0
  30. invar/resource.py +99 -0
  31. invar/shell/__init__.py +8 -0
  32. invar/shell/cli.py +358 -0
  33. invar/shell/config.py +248 -0
  34. invar/shell/fs.py +112 -0
  35. invar/shell/git.py +85 -0
  36. invar/shell/guard_helpers.py +324 -0
  37. invar/shell/guard_output.py +235 -0
  38. invar/shell/init_cmd.py +289 -0
  39. invar/shell/mcp_config.py +171 -0
  40. invar/shell/perception.py +125 -0
  41. invar/shell/property_tests.py +227 -0
  42. invar/shell/prove.py +460 -0
  43. invar/shell/prove_cache.py +133 -0
  44. invar/shell/prove_fallback.py +183 -0
  45. invar/shell/templates.py +443 -0
  46. invar/shell/test_cmd.py +117 -0
  47. invar/shell/testing.py +297 -0
  48. invar/shell/update_cmd.py +191 -0
  49. invar/templates/CLAUDE.md.template +58 -0
  50. invar/templates/INVAR.md +134 -0
  51. invar/templates/__init__.py +1 -0
  52. invar/templates/aider.conf.yml.template +29 -0
  53. invar/templates/context.md.template +51 -0
  54. invar/templates/cursorrules.template +28 -0
  55. invar/templates/examples/README.md +21 -0
  56. invar/templates/examples/contracts.py +111 -0
  57. invar/templates/examples/core_shell.py +121 -0
  58. invar/templates/pre-commit-config.yaml.template +44 -0
  59. invar/templates/proposal.md.template +93 -0
  60. invar_tools-1.0.0.dist-info/METADATA +321 -0
  61. invar_tools-1.0.0.dist-info/RECORD +64 -0
  62. invar_tools-1.0.0.dist-info/WHEEL +4 -0
  63. invar_tools-1.0.0.dist-info/entry_points.txt +2 -0
  64. invar_tools-1.0.0.dist-info/licenses/LICENSE +21 -0
invar/shell/prove.py ADDED
@@ -0,0 +1,460 @@
+ """
+ Proof verification with Hypothesis fallback.
+
+ Shell module: DX-12 + DX-13 implementation.
+ - DX-12: CrossHair verification with automatic Hypothesis fallback
+ - DX-13: Incremental verification, parallel execution, caching
+ """
+
+ from __future__ import annotations
+
+ import os
+ import subprocess
+ import sys
+ from concurrent.futures import ProcessPoolExecutor, as_completed
+ from pathlib import Path  # noqa: TC003 - used at runtime
+ from typing import TYPE_CHECKING
+
+ from returns.result import Failure, Result, Success
+ from rich.console import Console
+
+ # DX-13: Cache module extracted for file size compliance
+ from invar.shell.prove_cache import ProveCache  # noqa: TC001 - runtime usage
+
+ # DX-12: Hypothesis fallback (extracted to prove_fallback.py for file size compliance)
+ from invar.shell.prove_fallback import (
+     run_hypothesis_fallback as run_hypothesis_fallback,
+ )
+ from invar.shell.prove_fallback import (
+     run_prove_with_fallback as run_prove_with_fallback,
+ )
+
+ if TYPE_CHECKING:
+     from typing import Any
+
+ console = Console()
+
+
+ # ============================================================
+ # CrossHair Status Codes
+ # ============================================================
+
+
+ class CrossHairStatus:
+     """Status codes for CrossHair verification."""
+
+     VERIFIED = "verified"
+     COUNTEREXAMPLE = "counterexample_found"
+     SKIPPED = "skipped"
+     TIMEOUT = "timeout"
+     ERROR = "error"
+     CACHED = "cached"
+
+
+ # ============================================================
+ # DX-13: Contract Detection
+ # ============================================================
+
+
+ def has_verifiable_contracts(source: str) -> bool:
+     """
+     Check if source has verifiable contracts.
+
+     DX-13: Hybrid detection - fast string check + AST validation.
+
+     Args:
+         source: Python source code
+
+     Returns:
+         True if file has @pre/@post contracts worth verifying
+     """
+     # Fast path: no contract keywords at all
+     if "@pre" not in source and "@post" not in source:
+         return False
+
+     # AST validation to avoid false positives from comments/strings
+     try:
+         import ast
+
+         tree = ast.parse(source)
+     except SyntaxError:
+         return True  # Conservative: assume has contracts
+
+     contract_decorators = {"pre", "post"}
+
+     for node in ast.walk(tree):
+         if isinstance(node, (ast.FunctionDef, ast.AsyncFunctionDef)):
+             for dec in node.decorator_list:
+                 if isinstance(dec, ast.Call):
+                     func = dec.func
+                     # @pre(...) or @post(...)
+                     if isinstance(func, ast.Name) and func.id in contract_decorators:
+                         return True
+                     # @deal.pre(...) or @deal.post(...)
+                     if (
+                         isinstance(func, ast.Attribute)
+                         and func.attr in contract_decorators
+                     ):
+                         return True
+
+     return False
+
+
+ # ============================================================
+ # DX-13: Single File Verification (for parallel execution)
+ # ============================================================
+
+
+ def _verify_single_file(
+     file_path: str,
+     max_iterations: int = 5,
+ ) -> dict[str, Any]:
+     """
+     Verify a single file with CrossHair.
+
+     DX-13: Uses --max_uninteresting_iterations for adaptive timeout.
+
+     Args:
+         file_path: Path to Python file
+         max_iterations: Maximum uninteresting iterations (default: 5)
+
+     Returns:
+         Verification result dict
+     """
+     import time
+
+     start_time = time.time()
+
+     cmd = [
+         sys.executable,
+         "-m",
+         "crosshair",
+         "check",
+         file_path,
+         f"--max_uninteresting_iterations={max_iterations}",
+         "--analysis_kind=deal",
+     ]
+
+     try:
+         result = subprocess.run(
+             cmd,
+             capture_output=True,
+             text=True,
+             timeout=300,  # 5 minute max per file
+         )
+
+         elapsed_ms = int((time.time() - start_time) * 1000)
+
+         if result.returncode == 0:
+             return {
+                 "file": file_path,
+                 "status": CrossHairStatus.VERIFIED,
+                 "time_ms": elapsed_ms,
+                 "stdout": result.stdout,
+             }
+         else:
+             counterexamples = [
+                 line.strip()
+                 for line in result.stdout.split("\n")
+                 if line.strip() and "error" not in line.lower()
+             ]
+             return {
+                 "file": file_path,
+                 "status": CrossHairStatus.COUNTEREXAMPLE,
+                 "time_ms": elapsed_ms,
+                 "counterexamples": counterexamples,
+                 "stdout": result.stdout,
+             }
+
+     except subprocess.TimeoutExpired:
+         return {
+             "file": file_path,
+             "status": CrossHairStatus.TIMEOUT,
+             "time_ms": 300000,
+         }
+     except Exception as e:
+         return {
+             "file": file_path,
+             "status": CrossHairStatus.ERROR,
+             "error": str(e),
+         }
+
+
+ # ============================================================
+ # DX-13: Parallel CrossHair Execution
+ # ============================================================
+
+
+ def run_crosshair_parallel(
+     files: list[Path],
+     max_iterations: int = 5,
+     max_workers: int | None = None,
+     cache: ProveCache | None = None,
+ ) -> Result[dict, str]:
+     """
+     Run CrossHair on multiple files in parallel.
+
+     DX-13: Parallel execution with caching support.
+
+     Args:
+         files: List of Python file paths to verify
+         max_iterations: Maximum uninteresting iterations per condition
+         max_workers: Number of parallel workers (default: CPU count)
+         cache: Optional verification cache
+
+     Returns:
+         Success with verification results or Failure with error message
+     """
+     # Check if crosshair is available
+     try:
+         import crosshair  # noqa: F401
+     except ImportError:
+         return Success(
+             {
+                 "status": CrossHairStatus.SKIPPED,
+                 "reason": "CrossHair not installed (pip install crosshair-tool)",
+                 "files": [],
+             }
+         )
+
+     if not files:
+         return Success(
+             {
+                 "status": CrossHairStatus.SKIPPED,
+                 "reason": "no files",
+                 "files": [],
+             }
+         )
+
+     # Filter to Python files only
+     py_files = [f for f in files if f.suffix == ".py" and f.exists()]
+     if not py_files:
+         return Success(
+             {
+                 "status": CrossHairStatus.SKIPPED,
+                 "reason": "no Python files",
+                 "files": [],
+             }
+         )
+
+     # DX-13: Filter files with contracts and check cache
+     files_to_verify: list[Path] = []
+     cached_results: list[dict] = []
+
+     for py_file in py_files:
+         # Check cache first
+         if cache and cache.is_valid(py_file):
+             entry = cache.get(py_file)
+             if entry:
+                 cached_results.append(
+                     {
+                         "file": str(py_file),
+                         "status": CrossHairStatus.CACHED,
+                         "cached_result": entry.result,
+                     }
+                 )
+                 continue
+
+         # Check if file has contracts
+         try:
+             source = py_file.read_text()
+             if not has_verifiable_contracts(source):
+                 cached_results.append(
+                     {
+                         "file": str(py_file),
+                         "status": CrossHairStatus.SKIPPED,
+                         "reason": "no contracts",
+                     }
+                 )
+                 continue
+         except OSError:
+             pass  # Include file anyway
+
+         files_to_verify.append(py_file)
+
+     # If all files are cached/skipped, return early
+     if not files_to_verify:
+         return Success(
+             {
+                 "status": CrossHairStatus.VERIFIED,
+                 "verified": [],
+                 "cached": [r["file"] for r in cached_results if r["status"] == "cached"],
+                 "skipped": [r["file"] for r in cached_results if r["status"] == "skipped"],
+                 "files": [str(f) for f in py_files],
+                 "from_cache": True,
+             }
+         )
+
+     # Determine worker count
+     if max_workers is None:
+         max_workers = min(len(files_to_verify), os.cpu_count() or 4)
+
+     # Run verification in parallel
+     verified_files: list[str] = []
+     failed_files: list[str] = []
+     all_counterexamples: list[str] = []
+     total_time_ms = 0
+
+     if max_workers > 1 and len(files_to_verify) > 1:
+         # Parallel execution
+         with ProcessPoolExecutor(max_workers=max_workers) as executor:
+             futures = {
+                 executor.submit(_verify_single_file, str(f), max_iterations): f
+                 for f in files_to_verify
+             }
+
+             for future in as_completed(futures):
+                 file_path = futures[future]
+                 try:
+                     result = future.result()
+                     _process_verification_result(
+                         result,
+                         file_path,
+                         verified_files,
+                         failed_files,
+                         all_counterexamples,
+                         cache,
+                     )
+                     total_time_ms += result.get("time_ms", 0)
+                 except Exception as e:
+                     failed_files.append(f"{file_path} ({e})")
+     else:
+         # Sequential execution (single file or max_workers=1)
+         for py_file in files_to_verify:
+             result = _verify_single_file(str(py_file), max_iterations)
+             _process_verification_result(
+                 result,
+                 py_file,
+                 verified_files,
+                 failed_files,
+                 all_counterexamples,
+                 cache,
+             )
+             total_time_ms += result.get("time_ms", 0)
+
+     # Determine overall status
+     status = (
+         CrossHairStatus.VERIFIED if not failed_files else CrossHairStatus.COUNTEREXAMPLE
+     )
+
+     return Success(
+         {
+             "status": status,
+             "verified": verified_files,
+             "failed": failed_files,
+             "cached": [r["file"] for r in cached_results if r.get("status") == "cached"],
+             "skipped": [r["file"] for r in cached_results if r.get("status") == "skipped"],
+             "counterexamples": all_counterexamples,
+             "files": [str(f) for f in py_files],
+             "files_verified": len(files_to_verify),
+             "files_cached": len([r for r in cached_results if r.get("status") == "cached"]),
+             "total_time_ms": total_time_ms,
+             "workers": max_workers,
+         }
+     )
+
+
+ def _process_verification_result(
+     result: dict,
+     file_path: Path,
+     verified_files: list[str],
+     failed_files: list[str],
+     all_counterexamples: list[str],
+     cache: ProveCache | None,
+ ) -> None:
+     """Process a single verification result."""
+     status = result.get("status", "")
+
+     if status == CrossHairStatus.VERIFIED:
+         verified_files.append(str(file_path))
+         if cache:
+             cache.set(
+                 file_path,
+                 result="verified",
+                 time_taken_ms=result.get("time_ms", 0),
+             )
+     elif status == CrossHairStatus.COUNTEREXAMPLE:
+         failed_files.append(str(file_path))
+         for ce in result.get("counterexamples", []):
+             all_counterexamples.append(f"{file_path.name}: {ce}")
+     elif status == CrossHairStatus.TIMEOUT:
+         failed_files.append(f"{file_path} (timeout)")
+     elif status == CrossHairStatus.ERROR:
+         failed_files.append(f"{file_path} ({result.get('error', 'unknown error')})")
+
+
+ # ============================================================
+ # Original API (backwards compatible)
+ # ============================================================
+
+
+ def run_crosshair_on_files(
+     files: list[Path], timeout: int = 10
+ ) -> Result[dict, str]:
+     """
+     Run CrossHair symbolic verification on a list of Python files.
+
+     DX-13: Now uses parallel execution with adaptive iterations.
+
+     Args:
+         files: List of Python file paths to verify
+         timeout: Ignored (kept for backwards compatibility)
+
+     Returns:
+         Success with verification results or Failure with error message
+     """
+     # DX-13: Use new parallel implementation with fast mode
+     return run_crosshair_parallel(
+         files,
+         max_iterations=5,  # Fast mode
+         max_workers=None,  # Auto-detect
+         cache=None,  # No cache for basic API
+     )
+
+
+ # ============================================================
+ # DX-13: Incremental Verification API
+ # ============================================================
+
+
+ def get_files_to_prove(
+     path: Path,
+     all_core_files: list[Path],
+     changed_only: bool = True,
+ ) -> list[Path]:
+     """
+     Get files that need proof verification.
+
+     DX-13: Automatically filters to changed files when in git repo.
+
+     Args:
+         path: Project root path
+         all_core_files: All core files in project
+         changed_only: If True, only return changed files
+
+     Returns:
+         List of files to verify
+     """
+     if not changed_only:
+         return all_core_files
+
+     # Check if git repo
+     try:
+         from invar.shell.git import get_changed_files, is_git_repo
+
+         if not is_git_repo(path):
+             return all_core_files  # Not a git repo, verify all
+
+         changed_result = get_changed_files(path)
+         if isinstance(changed_result, Failure):
+             return all_core_files  # Git error, verify all
+
+         changed = changed_result.unwrap()
+         if not changed:
+             return []  # No changes, nothing to verify
+
+         # Filter to core files that are changed
+         return [f for f in all_core_files if f in changed]
+
+     except ImportError:
+         return all_core_files  # Git module not available
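
For orientation, here is a minimal, hedged sketch of how a caller might wire the pieces above together. It is not part of the package: the project layout, the `.invar/prove_cache` directory, and the glob over `invar/core` are assumptions for illustration; only `get_files_to_prove`, `run_crosshair_parallel`, and `ProveCache` come from this diff.

# Hypothetical caller; the layout and cache path are assumptions, not package behavior.
from pathlib import Path

from invar.shell.prove import get_files_to_prove, run_crosshair_parallel
from invar.shell.prove_cache import ProveCache

project_root = Path(".")  # assumption: run from the project root
core_files = sorted((project_root / "invar" / "core").glob("*.py"))

# DX-13 incremental selection: only changed files when inside a git repo.
to_prove = get_files_to_prove(project_root, core_files, changed_only=True)

# DX-13 caching: entries are keyed by file hash and CrossHair version.
cache = ProveCache(cache_dir=project_root / ".invar" / "prove_cache")

report = run_crosshair_parallel(to_prove, max_iterations=5, cache=cache).unwrap()
print(report["status"])
for ce in report.get("counterexamples", []):
    print("counterexample:", ce)

Note that a missing CrossHair install is reported as a Success with status "skipped" rather than as a Failure, so callers branch on report["status"] rather than on the Result type.
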
invar/shell/prove_cache.py ADDED
@@ -0,0 +1,133 @@
+ """
+ Verification cache for proof verification.
+
+ DX-13: Caches CrossHair verification results to avoid re-verification.
+ """
+
+ from __future__ import annotations
+
+ import hashlib
+ import json
+ from dataclasses import dataclass, field
+ from datetime import datetime
+ from pathlib import Path  # noqa: TC003 - runtime usage
+
+
+ @dataclass
+ class CacheEntry:
+     """Cache entry for a verified file."""
+
+     file_path: str
+     file_hash: str
+     verified_at: str
+     result: str
+     crosshair_version: str
+     invar_version: str
+     time_taken_ms: int = 0
+     functions_checked: int = 0
+
+
+ @dataclass
+ class ProveCache:
+     """Cache for proof verification results."""
+
+     cache_dir: Path
+     entries: dict[str, CacheEntry] = field(default_factory=dict)
+     _crosshair_version: str = ""
+     _invar_version: str = ""
+
+     def __post_init__(self) -> None:
+         """Initialize cache directory and load existing entries."""
+         self.cache_dir.mkdir(parents=True, exist_ok=True)
+         self._load_manifest()
+         self._crosshair_version = self._get_crosshair_version()
+         self._invar_version = self._get_invar_version()
+
+     def _load_manifest(self) -> None:
+         """Load cache manifest if exists."""
+         manifest_path = self.cache_dir / "manifest.json"
+         if manifest_path.exists():
+             try:
+                 data = json.loads(manifest_path.read_text())
+                 for key, entry_data in data.get("entries", {}).items():
+                     self.entries[key] = CacheEntry(**entry_data)
+             except (json.JSONDecodeError, TypeError):
+                 pass  # Corrupted cache, will rebuild
+
+     def _save_manifest(self) -> None:
+         """Save cache manifest."""
+         manifest_path = self.cache_dir / "manifest.json"
+         data = {
+             "version": "1.0",
+             "created": datetime.now().isoformat(),
+             "crosshair_version": self._crosshair_version,
+             "invar_version": self._invar_version,
+             "entries": {k: vars(v) for k, v in self.entries.items()},
+         }
+         manifest_path.write_text(json.dumps(data, indent=2))
+
+     def _get_crosshair_version(self) -> str:
+         """Get installed CrossHair version."""
+         try:
+             import crosshair
+
+             return getattr(crosshair, "__version__", "unknown")
+         except ImportError:
+             return "not_installed"
+
+     def _get_invar_version(self) -> str:
+         """Get Invar version."""
+         try:
+             from invar import __version__
+
+             return __version__
+         except ImportError:
+             return "unknown"
+
+     def get(self, file_path: Path) -> CacheEntry | None:
+         """Get cache entry for a file."""
+         key = str(file_path)
+         return self.entries.get(key)
+
+     def is_valid(self, file_path: Path) -> bool:
+         """Check if cache entry is valid for file."""
+         entry = self.get(file_path)
+         if entry is None:
+             return False
+
+         # Check file hash
+         current_hash = self._hash_file(file_path)
+         if entry.file_hash != current_hash:
+             return False
+
+         # Check CrossHair version
+         return entry.crosshair_version == self._crosshair_version
+
+     def set(
+         self,
+         file_path: Path,
+         result: str,
+         time_taken_ms: int = 0,
+         functions_checked: int = 0,
+     ) -> None:
+         """Set cache entry for a file."""
+         key = str(file_path)
+         self.entries[key] = CacheEntry(
+             file_path=key,
+             file_hash=self._hash_file(file_path),
+             verified_at=datetime.now().isoformat(),
+             result=result,
+             crosshair_version=self._crosshair_version,
+             invar_version=self._invar_version,
+             time_taken_ms=time_taken_ms,
+             functions_checked=functions_checked,
+         )
+         self._save_manifest()
+
+     def _hash_file(self, file_path: Path) -> str:
+         """Calculate SHA256 hash of file content."""
+         try:
+             content = file_path.read_bytes()
+             return hashlib.sha256(content).hexdigest()[:16]
+         except OSError:
+             return "error"