zexus 1.6.2
This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +0 -0
- package/README.md +2513 -0
- package/bin/zexus +2 -0
- package/bin/zpics +2 -0
- package/bin/zpm +2 -0
- package/bin/zx +2 -0
- package/bin/zx-deploy +2 -0
- package/bin/zx-dev +2 -0
- package/bin/zx-run +2 -0
- package/package.json +66 -0
- package/scripts/README.md +24 -0
- package/scripts/postinstall.js +44 -0
- package/shared_config.json +24 -0
- package/src/README.md +1525 -0
- package/src/tests/run_zexus_tests.py +117 -0
- package/src/tests/test_all_phases.zx +346 -0
- package/src/tests/test_blockchain_features.zx +306 -0
- package/src/tests/test_complexity_features.zx +321 -0
- package/src/tests/test_core_integration.py +185 -0
- package/src/tests/test_phase10_ecosystem.zx +177 -0
- package/src/tests/test_phase1_modifiers.zx +87 -0
- package/src/tests/test_phase2_plugins.zx +80 -0
- package/src/tests/test_phase3_security.zx +97 -0
- package/src/tests/test_phase4_vfs.zx +116 -0
- package/src/tests/test_phase5_types.zx +117 -0
- package/src/tests/test_phase6_metaprogramming.zx +125 -0
- package/src/tests/test_phase7_optimization.zx +132 -0
- package/src/tests/test_phase9_advanced_types.zx +157 -0
- package/src/tests/test_security_features.py +419 -0
- package/src/tests/test_security_features.zx +276 -0
- package/src/tests/test_simple_zx.zx +1 -0
- package/src/tests/test_verification_simple.zx +69 -0
- package/src/zexus/__init__.py +28 -0
- package/src/zexus/__main__.py +5 -0
- package/src/zexus/__pycache__/__init__.cpython-312.pyc +0 -0
- package/src/zexus/__pycache__/advanced_types.cpython-312.pyc +0 -0
- package/src/zexus/__pycache__/builtin_modules.cpython-312.pyc +0 -0
- package/src/zexus/__pycache__/capability_system.cpython-312.pyc +0 -0
- package/src/zexus/__pycache__/complexity_system.cpython-312.pyc +0 -0
- package/src/zexus/__pycache__/concurrency_system.cpython-312.pyc +0 -0
- package/src/zexus/__pycache__/config.cpython-312.pyc +0 -0
- package/src/zexus/__pycache__/dependency_injection.cpython-312.pyc +0 -0
- package/src/zexus/__pycache__/ecosystem.cpython-312.pyc +0 -0
- package/src/zexus/__pycache__/environment.cpython-312.pyc +0 -0
- package/src/zexus/__pycache__/error_reporter.cpython-312.pyc +0 -0
- package/src/zexus/__pycache__/hybrid_orchestrator.cpython-312.pyc +0 -0
- package/src/zexus/__pycache__/lexer.cpython-312.pyc +0 -0
- package/src/zexus/__pycache__/metaprogramming.cpython-312.pyc +0 -0
- package/src/zexus/__pycache__/module_cache.cpython-312.pyc +0 -0
- package/src/zexus/__pycache__/object.cpython-312.pyc +0 -0
- package/src/zexus/__pycache__/optimization.cpython-312.pyc +0 -0
- package/src/zexus/__pycache__/plugin_system.cpython-312.pyc +0 -0
- package/src/zexus/__pycache__/policy_engine.cpython-312.pyc +0 -0
- package/src/zexus/__pycache__/security.cpython-312.pyc +0 -0
- package/src/zexus/__pycache__/stdlib_integration.cpython-312.pyc +0 -0
- package/src/zexus/__pycache__/strategy_recovery.cpython-312.pyc +0 -0
- package/src/zexus/__pycache__/syntax_validator.cpython-312.pyc +0 -0
- package/src/zexus/__pycache__/type_system.cpython-312.pyc +0 -0
- package/src/zexus/__pycache__/virtual_filesystem.cpython-312.pyc +0 -0
- package/src/zexus/__pycache__/zexus_ast.cpython-312.pyc +0 -0
- package/src/zexus/__pycache__/zexus_token.cpython-312.pyc +0 -0
- package/src/zexus/advanced_types.py +401 -0
- package/src/zexus/blockchain/__init__.py +40 -0
- package/src/zexus/blockchain/__pycache__/__init__.cpython-312.pyc +0 -0
- package/src/zexus/blockchain/__pycache__/crypto.cpython-312.pyc +0 -0
- package/src/zexus/blockchain/__pycache__/ledger.cpython-312.pyc +0 -0
- package/src/zexus/blockchain/__pycache__/transaction.cpython-312.pyc +0 -0
- package/src/zexus/blockchain/crypto.py +463 -0
- package/src/zexus/blockchain/ledger.py +255 -0
- package/src/zexus/blockchain/transaction.py +267 -0
- package/src/zexus/builtin_modules.py +284 -0
- package/src/zexus/builtin_plugins.py +317 -0
- package/src/zexus/capability_system.py +372 -0
- package/src/zexus/cli/__init__.py +2 -0
- package/src/zexus/cli/__pycache__/__init__.cpython-312.pyc +0 -0
- package/src/zexus/cli/__pycache__/main.cpython-312.pyc +0 -0
- package/src/zexus/cli/main.py +707 -0
- package/src/zexus/cli/zpm.py +203 -0
- package/src/zexus/compare_interpreter_compiler.py +146 -0
- package/src/zexus/compiler/__init__.py +169 -0
- package/src/zexus/compiler/__pycache__/__init__.cpython-312.pyc +0 -0
- package/src/zexus/compiler/__pycache__/lexer.cpython-312.pyc +0 -0
- package/src/zexus/compiler/__pycache__/parser.cpython-312.pyc +0 -0
- package/src/zexus/compiler/__pycache__/zexus_ast.cpython-312.pyc +0 -0
- package/src/zexus/compiler/bytecode.py +266 -0
- package/src/zexus/compiler/compat_runtime.py +277 -0
- package/src/zexus/compiler/lexer.py +257 -0
- package/src/zexus/compiler/parser.py +779 -0
- package/src/zexus/compiler/semantic.py +118 -0
- package/src/zexus/compiler/zexus_ast.py +454 -0
- package/src/zexus/complexity_system.py +575 -0
- package/src/zexus/concurrency_system.py +493 -0
- package/src/zexus/config.py +201 -0
- package/src/zexus/crypto_bridge.py +19 -0
- package/src/zexus/dependency_injection.py +423 -0
- package/src/zexus/ecosystem.py +434 -0
- package/src/zexus/environment.py +101 -0
- package/src/zexus/environment_manager.py +119 -0
- package/src/zexus/error_reporter.py +314 -0
- package/src/zexus/evaluator/__init__.py +12 -0
- package/src/zexus/evaluator/__pycache__/__init__.cpython-312.pyc +0 -0
- package/src/zexus/evaluator/__pycache__/bytecode_compiler.cpython-312.pyc +0 -0
- package/src/zexus/evaluator/__pycache__/core.cpython-312.pyc +0 -0
- package/src/zexus/evaluator/__pycache__/expressions.cpython-312.pyc +0 -0
- package/src/zexus/evaluator/__pycache__/functions.cpython-312.pyc +0 -0
- package/src/zexus/evaluator/__pycache__/integration.cpython-312.pyc +0 -0
- package/src/zexus/evaluator/__pycache__/statements.cpython-312.pyc +0 -0
- package/src/zexus/evaluator/__pycache__/utils.cpython-312.pyc +0 -0
- package/src/zexus/evaluator/bytecode_compiler.py +700 -0
- package/src/zexus/evaluator/core.py +891 -0
- package/src/zexus/evaluator/expressions.py +827 -0
- package/src/zexus/evaluator/functions.py +3989 -0
- package/src/zexus/evaluator/integration.py +396 -0
- package/src/zexus/evaluator/statements.py +4303 -0
- package/src/zexus/evaluator/utils.py +126 -0
- package/src/zexus/evaluator_original.py +2041 -0
- package/src/zexus/external_bridge.py +16 -0
- package/src/zexus/find_affected_imports.sh +155 -0
- package/src/zexus/hybrid_orchestrator.py +152 -0
- package/src/zexus/input_validation.py +259 -0
- package/src/zexus/lexer.py +571 -0
- package/src/zexus/logging.py +89 -0
- package/src/zexus/lsp/__init__.py +9 -0
- package/src/zexus/lsp/completion_provider.py +207 -0
- package/src/zexus/lsp/definition_provider.py +22 -0
- package/src/zexus/lsp/hover_provider.py +71 -0
- package/src/zexus/lsp/server.py +269 -0
- package/src/zexus/lsp/symbol_provider.py +31 -0
- package/src/zexus/metaprogramming.py +321 -0
- package/src/zexus/module_cache.py +89 -0
- package/src/zexus/module_manager.py +107 -0
- package/src/zexus/object.py +973 -0
- package/src/zexus/optimization.py +424 -0
- package/src/zexus/parser/__init__.py +31 -0
- package/src/zexus/parser/__pycache__/__init__.cpython-312.pyc +0 -0
- package/src/zexus/parser/__pycache__/parser.cpython-312.pyc +0 -0
- package/src/zexus/parser/__pycache__/strategy_context.cpython-312.pyc +0 -0
- package/src/zexus/parser/__pycache__/strategy_structural.cpython-312.pyc +0 -0
- package/src/zexus/parser/integration.py +86 -0
- package/src/zexus/parser/parser.py +3977 -0
- package/src/zexus/parser/strategy_context.py +7254 -0
- package/src/zexus/parser/strategy_structural.py +1033 -0
- package/src/zexus/persistence.py +391 -0
- package/src/zexus/plugin_system.py +290 -0
- package/src/zexus/policy_engine.py +365 -0
- package/src/zexus/profiler/__init__.py +5 -0
- package/src/zexus/profiler/profiler.py +233 -0
- package/src/zexus/purity_system.py +398 -0
- package/src/zexus/runtime/__init__.py +20 -0
- package/src/zexus/runtime/async_runtime.py +324 -0
- package/src/zexus/search_old_imports.sh +65 -0
- package/src/zexus/security.py +1407 -0
- package/src/zexus/stack_trace.py +233 -0
- package/src/zexus/stdlib/__init__.py +27 -0
- package/src/zexus/stdlib/blockchain.py +341 -0
- package/src/zexus/stdlib/compression.py +167 -0
- package/src/zexus/stdlib/crypto.py +124 -0
- package/src/zexus/stdlib/datetime.py +163 -0
- package/src/zexus/stdlib/db_mongo.py +199 -0
- package/src/zexus/stdlib/db_mysql.py +162 -0
- package/src/zexus/stdlib/db_postgres.py +163 -0
- package/src/zexus/stdlib/db_sqlite.py +133 -0
- package/src/zexus/stdlib/encoding.py +230 -0
- package/src/zexus/stdlib/fs.py +195 -0
- package/src/zexus/stdlib/http.py +219 -0
- package/src/zexus/stdlib/http_server.py +248 -0
- package/src/zexus/stdlib/json_module.py +61 -0
- package/src/zexus/stdlib/math.py +360 -0
- package/src/zexus/stdlib/os_module.py +265 -0
- package/src/zexus/stdlib/regex.py +148 -0
- package/src/zexus/stdlib/sockets.py +253 -0
- package/src/zexus/stdlib/test_framework.zx +208 -0
- package/src/zexus/stdlib/test_runner.zx +119 -0
- package/src/zexus/stdlib_integration.py +341 -0
- package/src/zexus/strategy_recovery.py +256 -0
- package/src/zexus/syntax_validator.py +356 -0
- package/src/zexus/testing/zpics.py +407 -0
- package/src/zexus/testing/zpics_runtime.py +369 -0
- package/src/zexus/type_system.py +374 -0
- package/src/zexus/validation_system.py +569 -0
- package/src/zexus/virtual_filesystem.py +355 -0
- package/src/zexus/vm/__init__.py +8 -0
- package/src/zexus/vm/__pycache__/__init__.cpython-312.pyc +0 -0
- package/src/zexus/vm/__pycache__/async_optimizer.cpython-312.pyc +0 -0
- package/src/zexus/vm/__pycache__/bytecode.cpython-312.pyc +0 -0
- package/src/zexus/vm/__pycache__/cache.cpython-312.pyc +0 -0
- package/src/zexus/vm/__pycache__/jit.cpython-312.pyc +0 -0
- package/src/zexus/vm/__pycache__/memory_manager.cpython-312.pyc +0 -0
- package/src/zexus/vm/__pycache__/memory_pool.cpython-312.pyc +0 -0
- package/src/zexus/vm/__pycache__/optimizer.cpython-312.pyc +0 -0
- package/src/zexus/vm/__pycache__/parallel_vm.cpython-312.pyc +0 -0
- package/src/zexus/vm/__pycache__/peephole_optimizer.cpython-312.pyc +0 -0
- package/src/zexus/vm/__pycache__/profiler.cpython-312.pyc +0 -0
- package/src/zexus/vm/__pycache__/register_allocator.cpython-312.pyc +0 -0
- package/src/zexus/vm/__pycache__/register_vm.cpython-312.pyc +0 -0
- package/src/zexus/vm/__pycache__/ssa_converter.cpython-312.pyc +0 -0
- package/src/zexus/vm/__pycache__/vm.cpython-312.pyc +0 -0
- package/src/zexus/vm/async_optimizer.py +420 -0
- package/src/zexus/vm/bytecode.py +428 -0
- package/src/zexus/vm/bytecode_converter.py +297 -0
- package/src/zexus/vm/cache.py +532 -0
- package/src/zexus/vm/jit.py +720 -0
- package/src/zexus/vm/memory_manager.py +520 -0
- package/src/zexus/vm/memory_pool.py +511 -0
- package/src/zexus/vm/optimizer.py +478 -0
- package/src/zexus/vm/parallel_vm.py +899 -0
- package/src/zexus/vm/peephole_optimizer.py +452 -0
- package/src/zexus/vm/profiler.py +527 -0
- package/src/zexus/vm/register_allocator.py +462 -0
- package/src/zexus/vm/register_vm.py +520 -0
- package/src/zexus/vm/ssa_converter.py +757 -0
- package/src/zexus/vm/vm.py +1392 -0
- package/src/zexus/zexus_ast.py +1782 -0
- package/src/zexus/zexus_token.py +253 -0
- package/src/zexus/zpm/__init__.py +15 -0
- package/src/zexus/zpm/installer.py +116 -0
- package/src/zexus/zpm/package_manager.py +208 -0
- package/src/zexus/zpm/publisher.py +98 -0
- package/src/zexus/zpm/registry.py +110 -0
- package/src/zexus.egg-info/PKG-INFO +2235 -0
- package/src/zexus.egg-info/SOURCES.txt +876 -0
- package/src/zexus.egg-info/dependency_links.txt +1 -0
- package/src/zexus.egg-info/entry_points.txt +3 -0
- package/src/zexus.egg-info/not-zip-safe +1 -0
- package/src/zexus.egg-info/requires.txt +14 -0
- package/src/zexus.egg-info/top_level.txt +2 -0
- package/zexus.json +14 -0
package/src/zexus/testing/zpics.py
@@ -0,0 +1,407 @@
+"""
+Zexus Parser Invariant Checking System (ZPICS)
+==============================================
+
+A comprehensive regression prevention system that ensures parser changes
+don't break existing functionality by maintaining parse tree fingerprints
+and validating statement boundaries.
+
+Author: Zexus Team
+Date: December 2025
+"""
+
+import json
+import hashlib
+import os
+from pathlib import Path
+from typing import Dict, List, Optional, Any, Tuple
+from dataclasses import dataclass, asdict
+
+
+@dataclass
+class ParseSnapshot:
+    """Represents a snapshot of how a piece of code parses"""
+    source_code: str
+    source_hash: str
+    statements_count: int
+    token_boundaries: List[Tuple[int, int]]  # (start, end) for each statement
+    ast_structure: Dict[str, Any]
+    variable_declarations: List[str]
+    statement_types: List[str]
+    parse_metadata: Dict[str, Any]
+
+    def to_dict(self) -> Dict[str, Any]:
+        """Convert to dictionary for JSON serialization"""
+        return asdict(self)
+
+    @classmethod
+    def from_dict(cls, data: Dict[str, Any]) -> 'ParseSnapshot':
+        """Create from dictionary"""
+        return cls(**data)
+
+    def fingerprint(self) -> str:
+        """Generate unique fingerprint for this parse result"""
+        # Create deterministic string representation
+        fp_data = {
+            'source_hash': self.source_hash,
+            'statements_count': self.statements_count,
+            'token_boundaries': self.token_boundaries,
+            'variable_declarations': sorted(self.variable_declarations),
+            'statement_types': self.statement_types
+        }
+        fp_str = json.dumps(fp_data, sort_keys=True)
+        return hashlib.sha256(fp_str.encode()).hexdigest()
+
+
+@dataclass
+class InvariantViolation:
+    """Represents a detected invariant violation"""
+    test_name: str
+    violation_type: str
+    expected: Any
+    actual: Any
+    severity: str  # 'critical', 'warning', 'info'
+    description: str
+
+
+class ZPICSValidator:
+    """Main validator for parser invariants"""
+
+    def __init__(self, snapshot_dir: str = ".zpics_snapshots"):
+        self.snapshot_dir = Path(snapshot_dir)
+        self.snapshot_dir.mkdir(exist_ok=True)
+        self.violations: List[InvariantViolation] = []
+
+    def create_snapshot(self, test_name: str, source_code: str) -> ParseSnapshot:
+        """
+        Create a parse snapshot for given source code
+
+        Args:
+            test_name: Name of the test case
+            source_code: The Zexus code to parse
+
+        Returns:
+            ParseSnapshot object
+        """
+        from zexus.lexer import Lexer
+        from zexus.parser import Parser
+
+        # Hash the source code
+        source_hash = hashlib.md5(source_code.encode()).hexdigest()
+
+        # Tokenize and parse
+        try:
+            lexer = Lexer(source_code)
+            tokens = lexer.tokenize()
+
+            parser = Parser()
+            ast = parser.parse(tokens)
+        except Exception as e:
+            # If parsing fails, record it
+            return ParseSnapshot(
+                source_code=source_code,
+                source_hash=source_hash,
+                statements_count=0,
+                token_boundaries=[],
+                ast_structure={'error': str(e)},
+                variable_declarations=[],
+                statement_types=[],
+                parse_metadata={'parse_failed': True, 'error': str(e)}
+            )
+
+        # Extract metadata from AST
+        statements_count, token_boundaries, statement_types = self._analyze_ast(ast)
+        variable_declarations = self._extract_variables(ast)
+        ast_structure = self._ast_to_dict(ast)
+
+        return ParseSnapshot(
+            source_code=source_code,
+            source_hash=source_hash,
+            statements_count=statements_count,
+            token_boundaries=token_boundaries,
+            ast_structure=ast_structure,
+            variable_declarations=variable_declarations,
+            statement_types=statement_types,
+            parse_metadata={
+                'test_name': test_name,
+                'created_at': str(Path(__file__).stat().st_mtime)
+            }
+        )
+
+    def save_snapshot(self, test_name: str, snapshot: ParseSnapshot):
+        """Save snapshot to disk"""
+        snapshot_file = self.snapshot_dir / f"{test_name}.json"
+        with open(snapshot_file, 'w') as f:
+            json.dump(snapshot.to_dict(), f, indent=2)
+
+    def load_snapshot(self, test_name: str) -> Optional[ParseSnapshot]:
+        """Load snapshot from disk"""
+        snapshot_file = self.snapshot_dir / f"{test_name}.json"
+        if not snapshot_file.exists():
+            return None
+
+        with open(snapshot_file, 'r') as f:
+            data = json.load(f)
+            return ParseSnapshot.from_dict(data)
+
+    def validate_snapshot(self, test_name: str, current: ParseSnapshot,
+                          baseline: ParseSnapshot) -> List[InvariantViolation]:
+        """
+        Validate current parse against baseline snapshot
+
+        Returns list of violations found
+        """
+        violations = []
+
+        # Check if source code changed
+        if current.source_hash != baseline.source_hash:
+            violations.append(InvariantViolation(
+                test_name=test_name,
+                violation_type='source_changed',
+                expected=baseline.source_hash,
+                actual=current.source_hash,
+                severity='info',
+                description='Source code has changed'
+            ))
+
+        # Check statement count
+        if current.statements_count != baseline.statements_count:
+            violations.append(InvariantViolation(
+                test_name=test_name,
+                violation_type='statement_count_mismatch',
+                expected=baseline.statements_count,
+                actual=current.statements_count,
+                severity='critical',
+                description=f'Statement count changed from {baseline.statements_count} to {current.statements_count}'
+            ))
+
+        # Check token boundaries
+        if current.token_boundaries != baseline.token_boundaries:
+            violations.append(InvariantViolation(
+                test_name=test_name,
+                violation_type='token_boundaries_changed',
+                expected=baseline.token_boundaries,
+                actual=current.token_boundaries,
+                severity='critical',
+                description='Token collection boundaries have changed'
+            ))
+
+        # Check variable declarations
+        baseline_vars = set(baseline.variable_declarations)
+        current_vars = set(current.variable_declarations)
+
+        missing_vars = baseline_vars - current_vars
+        extra_vars = current_vars - baseline_vars
+
+        if missing_vars:
+            violations.append(InvariantViolation(
+                test_name=test_name,
+                violation_type='missing_variables',
+                expected=list(baseline_vars),
+                actual=list(current_vars),
+                severity='critical',
+                description=f'Variables missing: {missing_vars}'
+            ))
+
+        if extra_vars:
+            violations.append(InvariantViolation(
+                test_name=test_name,
+                violation_type='extra_variables',
+                expected=list(baseline_vars),
+                actual=list(current_vars),
+                severity='warning',
+                description=f'Extra variables declared: {extra_vars}'
+            ))
+
+        # Check statement types
+        if current.statement_types != baseline.statement_types:
+            violations.append(InvariantViolation(
+                test_name=test_name,
+                violation_type='statement_types_changed',
+                expected=baseline.statement_types,
+                actual=current.statement_types,
+                severity='critical',
+                description='Statement types sequence has changed'
+            ))
+
+        # Check fingerprints
+        if current.fingerprint() != baseline.fingerprint():
+            violations.append(InvariantViolation(
+                test_name=test_name,
+                violation_type='fingerprint_mismatch',
+                expected=baseline.fingerprint(),
+                actual=current.fingerprint(),
+                severity='critical',
+                description='Parse tree fingerprint has changed'
+            ))
+
+        return violations
+
+    def _analyze_ast(self, ast) -> Tuple[int, List[Tuple[int, int]], List[str]]:
+        """Extract statement count, token boundaries, and types from AST"""
+        if not hasattr(ast, 'statements'):
+            return 0, [], []
+
+        statements = ast.statements if hasattr(ast, 'statements') else []
+        count = len(statements)
+        boundaries = []
+        types = []
+
+        for stmt in statements:
+            stmt_type = type(stmt).__name__
+            types.append(stmt_type)
+
+            # Try to get token boundaries if available
+            if hasattr(stmt, 'start_pos') and hasattr(stmt, 'end_pos'):
+                boundaries.append((stmt.start_pos, stmt.end_pos))
+            else:
+                boundaries.append((0, 0))  # Placeholder
+
+        return count, boundaries, types
+
+    def _extract_variables(self, ast) -> List[str]:
+        """Extract all variable declarations from AST"""
+        variables = []
+
+        def walk(node):
+            if node is None:
+                return
+
+            # Check for variable declarations
+            node_type = type(node).__name__
+            if node_type in ['LetStatement', 'ConstStatement']:
+                if hasattr(node, 'name') and hasattr(node.name, 'value'):
+                    variables.append(node.name.value)
+
+            # Recursively walk children
+            if hasattr(node, 'statements'):
+                for stmt in node.statements:
+                    walk(stmt)
+            if hasattr(node, 'body'):
+                walk(node.body)
+
+        walk(ast)
+        return variables
+
+    def _ast_to_dict(self, node, max_depth: int = 5) -> Dict[str, Any]:
+        """Convert AST node to dictionary representation"""
+        if max_depth <= 0 or node is None:
+            return {'type': 'truncated'}
+
+        result = {
+            'type': type(node).__name__
+        }
+
+        # Add relevant attributes
+        if hasattr(node, 'value'):
+            result['value'] = str(node.value)
+        if hasattr(node, 'name') and hasattr(node.name, 'value'):
+            result['name'] = node.name.value
+
+        # Handle collections
+        if hasattr(node, 'statements'):
+            result['statements'] = [
+                self._ast_to_dict(stmt, max_depth - 1)
+                for stmt in (node.statements or [])
+            ]
+
+        return result
+
+    def generate_report(self, violations: List[InvariantViolation]) -> str:
+        """Generate human-readable report of violations"""
+        if not violations:
+            return "✅ All parser invariants validated successfully!\n"
+
+        report = []
+        report.append("=" * 70)
+        report.append("ZPICS VALIDATION REPORT")
+        report.append("=" * 70)
+        report.append("")
+
+        # Group by severity
+        critical = [v for v in violations if v.severity == 'critical']
+        warnings = [v for v in violations if v.severity == 'warning']
+        info = [v for v in violations if v.severity == 'info']
+
+        if critical:
+            report.append(f"❌ CRITICAL VIOLATIONS: {len(critical)}")
+            report.append("-" * 70)
+            for v in critical:
+                report.append(f"  Test: {v.test_name}")
+                report.append(f"  Type: {v.violation_type}")
+                report.append(f"  {v.description}")
+                report.append(f"  Expected: {v.expected}")
+                report.append(f"  Actual: {v.actual}")
+                report.append("")
+
+        if warnings:
+            report.append(f"⚠️ WARNINGS: {len(warnings)}")
+            report.append("-" * 70)
+            for v in warnings:
+                report.append(f"  Test: {v.test_name}")
+                report.append(f"  {v.description}")
+                report.append("")
+
+        if info:
+            report.append(f"ℹ️ INFO: {len(info)}")
+            report.append("-" * 70)
+            for v in info:
+                report.append(f"  Test: {v.test_name}")
+                report.append(f"  {v.description}")
+                report.append("")
+
+        report.append("=" * 70)
+        report.append(f"Total violations: {len(violations)}")
+        report.append(f"Critical: {len(critical)} | Warnings: {len(warnings)} | Info: {len(info)}")
+        report.append("=" * 70)
+
+        return "\n".join(report)
+
+
+def validate_parser_changes(golden_tests_dir: str = "tests/golden") -> bool:
+    """
+    Main entry point for validating parser changes
+
+    Args:
+        golden_tests_dir: Directory containing golden test files
+
+    Returns:
+        True if all validations pass, False otherwise
+    """
+    validator = ZPICSValidator()
+    all_violations = []
+
+    golden_dir = Path(golden_tests_dir)
+    if not golden_dir.exists():
+        print(f"⚠️ Golden tests directory not found: {golden_tests_dir}")
+        return True
+
+    # Process each golden test file
+    for test_file in golden_dir.glob("*.zx"):
+        test_name = test_file.stem
+        source_code = test_file.read_text()
+
+        # Create current snapshot
+        current = validator.create_snapshot(test_name, source_code)
+
+        # Load baseline
+        baseline = validator.load_snapshot(test_name)
+
+        if baseline is None:
+            # First time running - create baseline
+            print(f"📸 Creating baseline snapshot for: {test_name}")
+            validator.save_snapshot(test_name, current)
+            continue
+
+        # Validate against baseline
+        violations = validator.validate_snapshot(test_name, current, baseline)
+        all_violations.extend(violations)
+
+    # Generate and print report
+    if all_violations:
+        print(validator.generate_report(all_violations))
+        return False
+    else:
+        print("✅ All parser invariants validated successfully!")
+        print(f"   Tested {len(list(golden_dir.glob('*.zx')))} golden test cases")
+        return True