pynterp 0.1.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,6 @@
1
+ __pycache__/
2
+ *.py[cod]
3
+ .pytest_cache/
4
+ .ruff_cache/
5
+ .venv/
6
+ .todo-loop/
pynterp-0.1.0/PKG-INFO ADDED
@@ -0,0 +1,5 @@
1
+ Metadata-Version: 2.4
2
+ Name: pynterp
3
+ Version: 0.1.0
4
+ Summary: AST-walk interpreter for a meaningful Python subset
5
+ Requires-Python: >=3.10
@@ -0,0 +1,135 @@
1
+ # pynterp
2
+
3
+ `pynterp` is an AST-walk interpreter for a substantial, tested subset of Python.
4
+
5
+ ## Project focus
6
+
7
+ - readable interpreter internals (AST + explicit runtime scopes)
8
+ - deterministic execution through explicit environments
9
+ - measured behavior against CPython tests
10
+
11
+ ## Compatibility
12
+
13
+ - host runtime: CPython `3.10+` (`requires-python >=3.10`)
14
+ - language support tracks modern CPython AST features, including:
15
+ - structural pattern matching (`match` / `case`)
16
+ - exception groups (`except*`)
17
+ - generic type parameter syntax and runtime typing objects
18
+ - template strings (`TemplateStr`) when running on Python `3.14+`
19
+
20
+ ## Quickstart
21
+
22
+ ```pycon
23
+ >>> from pynterp import Interpreter
24
+ >>> interpreter = Interpreter()
25
+ >>> env = interpreter.make_default_env()
26
+ >>> run_result = interpreter.run("""
27
+ ... print("Hello, World!")
28
+ ... """, env=env)
29
+ Hello, World!
30
+ ```
31
+
32
+ ## What is implemented
33
+
34
+ ### Core execution model
35
+
36
+ - explicit env execution: `Interpreter.run(source, env=...)` requires a dict
37
+ - no implicit inheritance of host globals/builtins
38
+ - `RunResult` return model captures uncaught exceptions without forcing immediate raise
39
+ - interpreter-aware handling for `globals()`, `locals()`, `vars()`, `dir()`, `eval()`, and `exec()`
40
+
41
+ ### Statement support
42
+
43
+ - assignment forms: `=`, annotated assignment, augmented assignment, destructuring/starred targets
44
+ - control flow: `if`, `while`, `for`, `break`, `continue`, loop `else`
45
+ - function forms: `def`, `async def`, `return`, `global`, `nonlocal`
46
+ - class definitions with metaclass namespace handling (`__prepare__`) and private-name mangling
47
+ - exception handling: `try/except/else/finally`, `raise`, `raise ... from ...`, `try/except*`
48
+ - context managers: `with`, `async with`
49
+ - pattern matching: value/singleton/sequence/mapping/class/or/as patterns with guards
50
+ - imports: `import`, `from ... import ...`, optional relative import support
51
+ - type alias statement (`type X = ...`) with type parameter support
52
+
53
+ ### Expression support
54
+
55
+ - scalar and operator forms: constants, names, unary/binary/bool ops, chained comparisons, conditional expressions, walrus
56
+ - calls with CPython-like argument binding checks (`*args`, `**kwargs`, duplicate-key errors)
57
+ - containers and indexing: list/tuple/set/dict literals, starred unpacking, attributes, subscripts, slices
58
+ - string forms: f-strings and template strings (runtime-dependent for 3.14 features)
59
+ - functional forms: lambdas, comprehensions, generator expressions, `yield`, `yield from`, `await`
60
+
61
+ ### Functions, scopes, and runtime semantics
62
+
63
+ - lexical scoping with closure cells and `nonlocal` behavior
64
+ - descriptor-correct method binding (`UserFunction` + `BoundMethod`)
65
+ - zero-argument `super()` support via `__class__` closure behavior
66
+ - function metadata and interoperability: defaults/kwdefaults mutation, annotations, signatures, weakrefs, pickling paths
67
+ - class/generic metadata wiring including `__qualname__`, `__orig_bases__`, and `__type_params__` where applicable
68
+
69
+ ### Async and generator runtime
70
+
71
+ - generator execution path mirrors statement/expression semantics in generator mode
72
+ - async function execution with awaitable protocol handling
73
+ - interpreted async generator object with `asend`, `athrow`, and `aclose` behavior
74
+
75
+ ### Environment controls and hardening
76
+
77
+ - allowlist-based import control (`allowed_imports`)
78
+ - safe builtins surface (for example no implicit `open`)
79
+ - guarded reflection pivots via wrapped `getattr`/`setattr`/`delattr`/`hasattr`
80
+ - blocked high-risk attributes include names such as `__mro__`, `__subclasses__`, `__globals__`, frame globals/locals, and related pivots
81
+
82
+ This is in-process hardening, not an OS sandbox.
83
+ Out of scope: CPU/memory/time quotas and process/kernel isolation.
84
+
85
+ ### Interpreted module loading
86
+
87
+ - optional interpreted package imports via `InterpretedModuleLoader`
88
+ - interpreted modules can import each other through the interpreter runtime
89
+ - the project can interpret code that imports and runs `pynterp` itself (see bootstrap tests)
90
+
91
+ ### Tests in this repo
92
+
93
+ - `~711` tests across CLI behavior, semantics, security hardening, keyword binding, `super()` semantics, bootstrap/self-interpretation checks, and probe correctness
94
+ - large dedicated security suite exercises reflective escape attempts and descriptor rebound edge cases
95
+
96
+ ## CPython compatibility probe
97
+
98
+ Probe script: [`scripts/cpython_pynterp_probe.py`](scripts/cpython_pynterp_probe.py)
99
+
100
+ Reproducible module-mode probe command (from script header):
101
+
102
+ ```bash
103
+ uv run python scripts/cpython_pynterp_probe.py \
104
+ --cpython-root /tmp/cpython-3.14 \
105
+ --python-exe /tmp/cpython-3.14/python.exe \
106
+ --basis tests \
107
+ --mode module \
108
+ --strict-worker-match \
109
+ --json-out /tmp/pynterp-probe-tests-module.json
110
+ ```
111
+
112
+ Default unsupported source filters used by the probe classifier:
113
+
114
+ - `__import__`
115
+ - `__dict__`
116
+ - `__code__`
117
+
118
+ Snapshot baseline (February 27, 2026; CPython `origin/3.14`, `module` mode, `basis=tests`):
119
+
120
+ - total test files: `762`
121
+ - applicable files: `515` (`67.59%`)
122
+ - not applicable files: `247`
123
+ - declared individual tests in applicable files: `10,761`
124
+ - discovered+run individual tests: `12,560`
125
+ - estimated individual tests total: `15,339`
126
+ - individual pass: `9,406`
127
+ - individual skip: `1,239`
128
+ - individual fail: `4,694`
129
+ - pass rate (individual): `61.32%`
130
+ - individual pass+skip rate: `69.40%`
131
+
132
+ `script` mode (`__name__ == "__main__"`) is much lower and mainly useful for diagnosing entry-point assumptions (same run, `--mode script`):
133
+
134
+ - individual pass rate: `4.11%`
135
+ - individual pass+skip rate: `7.92%`
@@ -0,0 +1,42 @@
1
+ [build-system]
2
+ requires = ["hatchling>=1.27.0"]
3
+ build-backend = "hatchling.build"
4
+
5
+ [project]
6
+ name = "pynterp"
7
+ version = "0.1.0"
8
+ description = "AST-walk interpreter for a meaningful Python subset"
9
+ requires-python = ">=3.10"
10
+ dependencies = []
11
+
12
+ [dependency-groups]
13
+ dev = [
14
+ "ipython>=8.38.0",
15
+ "pytest>=8.4.0",
16
+ "ruff>=0.13.0",
17
+ ]
18
+
19
+ [tool.hatch.build.targets.wheel]
20
+ packages = ["src/pynterp"]
21
+
22
+ [tool.hatch.build.targets.sdist]
23
+ include = [
24
+ "src/pynterp",
25
+ "tests",
26
+ "pyproject.toml",
27
+ "README.md",
28
+ ]
29
+
30
+ [tool.uv]
31
+ default-groups = ["dev"]
32
+
33
+ [tool.pytest.ini_options]
34
+ addopts = "-ra"
35
+ testpaths = ["tests"]
36
+
37
+ [tool.ruff]
38
+ target-version = "py310"
39
+ line-length = 100
40
+
41
+ [tool.ruff.lint]
42
+ select = ["F", "I"]
@@ -0,0 +1,4 @@
1
+ from .core import RunResult
2
+ from .main import Interpreter
3
+
4
+ __all__ = ["Interpreter", "RunResult"]
@@ -0,0 +1,57 @@
1
+ import argparse
2
+ import sys
3
+ import traceback
4
+ from pathlib import Path
5
+
6
+ from .main import Interpreter
7
+
8
+
9
+ def _build_parser() -> argparse.ArgumentParser:
10
+ parser = argparse.ArgumentParser(
11
+ prog="python -m pynterp",
12
+ usage="python -m pynterp <script.py>",
13
+ )
14
+ parser.add_argument("script")
15
+ return parser
16
+
17
+
18
def main(argv: list[str] | None = None) -> int:
    """CLI entry point: interpret a script file and return a process exit code.

    argv: argument list without the program name; defaults to ``sys.argv[1:]``.
    Returns 0 on success, 2 when the script file does not exist, and otherwise
    mirrors the interpreted program's SystemExit / failure status.
    """
    parser = _build_parser()
    args_list = sys.argv[1:] if argv is None else argv
    try:
        args = parser.parse_args(args_list)
    except SystemExit as exc:
        # argparse exits with 0 for --help and 2 for usage errors, but exc.code
        # can in principle be None or a string; normalize defensively instead of
        # int(exc.code), which raises TypeError on None.
        code = exc.code
        if code is None:
            return 0
        return code if isinstance(code, int) else 1

    script_path = Path(args.script).resolve()
    if not script_path.is_file():
        print(f"pynterp: script not found: {script_path}", file=sys.stderr)
        return 2

    # Python source defaults to UTF-8; don't depend on the locale encoding.
    source = script_path.read_text(encoding="utf-8")
    interpreter = Interpreter()
    env = interpreter.make_default_env(
        env={
            "__file__": str(script_path),
            "__package__": None,
        },
        name="__main__",
    )
    result = interpreter.run(source, env=env, filename=str(script_path))
    if result.exception is not None:
        exc = result.exception
        if isinstance(exc, SystemExit):
            # Mirror CPython: None -> 0, int -> as-is, anything else is
            # printed to stderr and exits with status 1.
            code = exc.code
            if code is None:
                return 0
            if isinstance(code, int):
                return code
            print(code, file=sys.stderr)
            return 1
        traceback.print_exception(exc, file=sys.stderr)
        return 1
    return 0
54
+
55
+
56
if __name__ == "__main__":
    # Delegate to main() and surface its return value as the process exit status.
    raise SystemExit(main())
@@ -0,0 +1,150 @@
1
+ from __future__ import annotations
2
+
3
+ import ast
4
+ import symtable
5
+ from typing import Dict, Set
6
+
7
+ from .symtable_utils import _table_frees
8
+
9
+
10
+ def _build_symtable(source: str, filename: str) -> symtable.SymbolTable:
11
+ try:
12
+ return symtable.symtable(source, filename, "exec")
13
+ except TypeError as exc:
14
+ # CPython 3.14's Lib/symtable.py passes module=<...> as a keyword-only
15
+ # argument to _symtable.symtable(). Older host runtimes reject that call.
16
+ if "_symtable.symtable() takes no keyword arguments" not in str(exc):
17
+ raise
18
+ import _symtable
19
+
20
+ raw_table = _symtable.symtable(source, filename, "exec")
21
+ new_symbol_table = getattr(symtable, "_newSymbolTable", None)
22
+ if callable(new_symbol_table):
23
+ return new_symbol_table(raw_table, filename)
24
+ raise
25
+
26
+
27
class ScopeInfo:
    """Per-function scope info needed for runtime name resolution."""

    def __init__(self, table: symtable.Function, cellvars: Set[str]):
        # Snapshot everything the runtime needs as plain sets, so later
        # lookups never touch the symtable object again.
        self.table = table
        self.cellvars: Set[str] = set(cellvars)
        self.locals: Set[str] = set(table.get_locals())
        self.frees: Set[str] = set(table.get_frees())
        self.declared_globals: Set[str] = {
            symbol.get_name()
            for symbol in table.get_symbols()
            if symbol.is_declared_global()
        }
40
+
41
+
42
class ModuleCode:
    """
    Compiled-for-interpretation form of one module's source.

    Holds:
    - parsed AST
    - symtable tree
    - a mapping from (type,name,lineno) -> table
    - computed cellvars for each function table
    """

    def __init__(self, source: str, filename: str = "<pynterp>"):
        self.source = source
        self.filename = filename
        self.tree = ast.parse(source, filename=filename, mode="exec")
        self.sym_root = _build_symtable(source, filename)

        # (table type, name, lineno) -> all symtables registered under that key.
        # A list because distinct scopes can collide (e.g. several lambdas on
        # one line, all named "lambda").
        self._tables_by_key: Dict[tuple[str, str, int], list[symtable.SymbolTable]] = {}
        # symtable id -> names that must live in closure cells for that scope.
        self._cellvars_by_id: Dict[int, Set[str]] = {}
        # (lineno, col_offset) of a lambda -> its per-line occurrence index,
        # used to disambiguate same-line lambdas against the key lists above.
        self._lambda_occurrence_by_location: Dict[tuple[int, int], int] = {}

        self._index_tables(self.sym_root)
        self._index_lambda_occurrences()
        self._compute_cellvars(self.sym_root)

    def _index_tables(self, table: symtable.SymbolTable) -> None:
        """Recursively record every symtable under its (type, name, lineno) key."""
        key = (table.get_type(), table.get_name(), table.get_lineno())
        self._tables_by_key.setdefault(key, []).append(table)
        for child in table.get_children():
            self._index_tables(child)

    def _compute_cellvars(self, table: symtable.SymbolTable) -> None:
        """Compute, bottom-up, which locals of each function scope need cells.

        A local becomes a cellvar when any child scope references it as a free
        variable; non-function scopes get an empty set.
        """
        for child in table.get_children():
            self._compute_cellvars(child)

        if table.get_type() == "function":
            assert isinstance(table, symtable.Function)
            locals_ = set(table.get_locals())
            child_frees: Set[str] = set()
            for child in table.get_children():
                child_frees |= _table_frees(child)
            # Cellvars = locals that some nested scope closes over.
            self._cellvars_by_id[table.get_id()] = locals_ & child_frees
        else:
            self._cellvars_by_id[table.get_id()] = set()

    def _index_lambda_occurrences(self) -> None:
        """Assign each lambda a per-line occurrence index.

        The index order must match the order in which symtable registers the
        lambda scopes, so lookup_lambda_table can pick the right entry when
        several lambdas share a line number.
        """
        lambda_counts_by_line: Dict[int, int] = {}

        class Visitor(ast.NodeVisitor):
            def _visit_lambda_default_exprs(self, node: ast.Lambda) -> None:
                # Defaults (positional and keyword-only) are evaluated in the
                # enclosing scope, so any lambdas nested inside them count
                # before this lambda's own body.
                for default in node.args.defaults or []:
                    self.visit(default)
                for kw_default in getattr(node.args, "kw_defaults", []) or []:
                    if kw_default is not None:
                        self.visit(kw_default)

            def visit_Lambda(self, node: ast.Lambda) -> None:
                # Match symtable ordering: lambda defaults are analyzed in the
                # enclosing scope before the lambda scope itself is registered.
                self._visit_lambda_default_exprs(node)
                index = lambda_counts_by_line.get(node.lineno, 0)
                lambda_counts_by_line[node.lineno] = index + 1
                self_outer._lambda_occurrence_by_location[(node.lineno, node.col_offset)] = index
                self.visit(node.body)

        # `self_outer` is captured by the Visitor methods above; it only needs
        # to be bound before `visit` is actually called.
        self_outer = self
        Visitor().visit(self.tree)

    def lookup_function_table(
        self, fn_node: ast.FunctionDef | ast.AsyncFunctionDef
    ) -> symtable.Function:
        """Return the unique function symtable for *fn_node*.

        Raises RuntimeError when no table, or more than one table, matches the
        (name, lineno) key, or when the match is not a function table.
        """
        key = ("function", fn_node.name, fn_node.lineno)
        tables = self._tables_by_key.get(key, [])
        if not tables:
            raise RuntimeError(
                f"Could not find symtable for function {fn_node.name!r} at line {fn_node.lineno}"
            )
        if len(tables) > 1:
            raise RuntimeError(
                f"Ambiguous symtable match for function {fn_node.name!r} at line {fn_node.lineno}"
            )
        tbl = tables[0]
        if not isinstance(tbl, symtable.Function):
            raise RuntimeError("lookup_function_table returned non-function table")
        return tbl

    def lookup_lambda_table(self, lambda_node: ast.Lambda) -> symtable.Function:
        """Return the function symtable for *lambda_node*.

        All lambdas share the name "lambda", so same-line lambdas are
        disambiguated via the occurrence index recorded in
        _index_lambda_occurrences. Raises RuntimeError when the lambda cannot
        be resolved to exactly one table.
        """
        key = ("function", "lambda", lambda_node.lineno)
        tables = self._tables_by_key.get(key, [])
        if not tables:
            raise RuntimeError(
                "Could not find symtable for lambda at "
                f"line {lambda_node.lineno}:{lambda_node.col_offset}"
            )
        if len(tables) == 1:
            tbl = tables[0]
        else:
            location = (lambda_node.lineno, lambda_node.col_offset)
            index = self._lambda_occurrence_by_location.get(location)
            if index is None or index >= len(tables):
                raise RuntimeError(
                    "Ambiguous symtable match for lambda at "
                    f"line {lambda_node.lineno}:{lambda_node.col_offset}"
                )
            tbl = tables[index]
        if not isinstance(tbl, symtable.Function):
            raise RuntimeError("lookup_lambda_table returned non-function table")
        return tbl

    def scope_info_for(self, fn_table: symtable.Function) -> ScopeInfo:
        """Build a ScopeInfo for *fn_table* using the precomputed cellvars."""
        return ScopeInfo(fn_table, self._cellvars_by_id.get(fn_table.get_id(), set()))
@@ -0,0 +1,44 @@
1
+ from __future__ import annotations
2
+
3
+ from typing import Any
4
+
5
# Sentinels: unique objects whose identity marks "no value present" states.
UNBOUND = object()
NO_DEFAULT = object()


class ControlFlowSignal(BaseException):
    """Internal non-user exceptions used for control flow (return/break/continue)."""


class ReturnSignal(ControlFlowSignal):
    """Unwinds a function body, carrying the value being returned."""

    def __init__(self, value: Any):
        self.value = value


class BreakSignal(ControlFlowSignal):
    """Signals `break`: exit the innermost loop."""


class ContinueSignal(ControlFlowSignal):
    """Signals `continue`: advance to the next loop iteration."""


class AwaitRequest:
    """Internal value used to hand awaitables from the AST runner to async trampoline."""

    __slots__ = ("awaitable",)

    def __init__(self, awaitable: Any):
        self.awaitable = awaitable


class Cell:
    """A tiny closure cell."""

    __slots__ = ("value",)

    def __init__(self, value: Any = UNBOUND):
        self.value = value

    def __repr__(self) -> str:
        if self.value is UNBOUND:
            return "<Cell UNBOUND>"
        return f"<Cell {self.value!r}>"
@@ -0,0 +1,192 @@
1
+ import ast
2
+ from dataclasses import dataclass
3
+ from pathlib import Path
4
+ from types import ModuleType
5
+ from typing import Any, Dict, Iterator, Optional, Set
6
+
7
+ from pynterp.lib import (
8
+ InterpretedModuleLoader,
9
+ import_safe_stdlib_module,
10
+ make_safe_env,
11
+ )
12
+ from pynterp.lib.compat import maybe_patch_runtime_module
13
+
14
+ from .code import ModuleCode
15
+ from .scopes import ModuleScope, RuntimeScope
16
+
17
+
18
+ @dataclass(slots=True)
19
+ class RunResult:
20
+ globals: dict[str, Any]
21
+ exception: BaseException | None = None
22
+
23
+ @property
24
+ def ok(self) -> bool:
25
+ return self.exception is None
26
+
27
+ def raise_for_exception(self) -> None:
28
+ if self.exception is not None:
29
+ raise self.exception
30
+
31
+
32
class InterpreterCore:
    """Core AST interpreter: import gating, environment setup, and dispatch."""

    def __init__(
        self, allowed_imports: Optional[Set[str]] = None, allow_relative_imports: bool = False
    ):
        """
        allowed_imports:
        - None -> allow any import (NOT secure)
        - set() -> block all imports
        - {"math", "json"} -> allow only these roots (and their submodules)

        allow_relative_imports: when False, any `level > 0` import raises.
        """
        # Copy the set so later mutation by the caller cannot widen the policy.
        self.allowed_imports = None if allowed_imports is None else set(allowed_imports)
        self.allow_relative_imports = bool(allow_relative_imports)

    # ----- restricted import -----

    def _is_allowed_module(self, name: str) -> bool:
        """Return True when *name* (dotted module path) passes the allowlist."""
        if self.allowed_imports is None:
            return True
        if not name:
            return False
        for allowed in self.allowed_imports:
            # Exact match, dotted-prefix match, or matching top-level root
            # all count as "inside" an allowed package.
            if (
                name == allowed
                or name.startswith(allowed + ".")
                or name.split(".", 1)[0] == allowed
            ):
                return True
        return False

    def _restricted_import(self, name, globals=None, locals=None, fromlist=(), level=0):
        """__import__-compatible importer that enforces the allowlist.

        Raises ImportError for relative imports (unless enabled) and for
        modules outside `allowed_imports`.
        """
        if level and not self.allow_relative_imports:
            raise ImportError("relative imports are not supported by this interpreter")
        if not self._is_allowed_module(name):
            raise ImportError(f"import of '{name}' is not allowed")
        module = import_safe_stdlib_module(name)
        # Match __import__ behavior: without fromlist, return the top-level package.
        if not fromlist and "." in name:
            return self._adapt_runtime_value(import_safe_stdlib_module(name.split(".", 1)[0]))
        return self._adapt_runtime_value(module)

    def _adapt_runtime_value(self, value: Any) -> Any:
        """Apply host-runtime compatibility patches to an imported module/value."""
        return maybe_patch_runtime_module(value)

    def _import(self, name: str, scope: RuntimeScope, fromlist=(), level=0):
        """Resolve an interpreted `import` through the scope's own __import__."""
        imp = scope.builtins.get("__import__")
        if imp is None or not callable(imp):
            raise ImportError("__import__ is not available in this environment")
        # Interpreted modules have no separate locals dict at module level,
        # so globals is passed for both.
        value = imp(name, scope.globals, scope.globals, fromlist, level)
        return self._adapt_runtime_value(value)

    def make_default_env(
        self,
        env: Optional[dict] = None,
        *,
        name: str = "__main__",
        package_root: str | Path | None = None,
        package_name: str = "pynterp",
    ) -> dict:
        """Build a safe module environment dict for `run()`.

        env: optional seed mapping (copied, never mutated).
        name: value bound as the module's __name__.
        package_root/package_name: when a root is given, wire up an
        InterpretedModuleLoader so package imports run through the interpreter.

        Raises TypeError when env is neither dict nor None.
        """
        if env is None:
            base: Dict[str, Any] = {}
        elif isinstance(env, dict):
            base = dict(env)
        else:
            raise TypeError("env must be dict or None")

        loader: InterpretedModuleLoader | None = None
        importer = self._restricted_import
        if package_root is not None:
            loader = InterpretedModuleLoader(
                self,
                package_name=package_name,
                package_root=package_root,
                # Non-package imports still go through the allowlist importer.
                fallback_importer=self._restricted_import,
            )
            importer = loader.import_module

        out = make_safe_env(importer, env=base, name=name)
        if loader is not None:
            # setdefault: don't clobber a loader the caller already provided.
            out.setdefault("__module_loader__", loader)
        return out

    # ----- run -----

    def run(self, source: str, env: dict, filename: str = "<pynterp>") -> RunResult:
        """
        Execute `source` in a fresh AST interpreter module environment.

        Returns a RunResult with globals and any uncaught exception.
        """
        if not isinstance(env, dict):
            raise TypeError("env must be dict")
        globals_dict = env

        # Accept the same __builtins__ shapes CPython does: dict, module, or None.
        raw_builtins = globals_dict.get("__builtins__", {})
        if raw_builtins is None:
            builtins_dict: Dict[str, Any] = {}
        elif isinstance(raw_builtins, dict):
            builtins_dict = raw_builtins
        elif isinstance(raw_builtins, ModuleType):
            builtins_dict = raw_builtins.__dict__
        else:
            raise TypeError("__builtins__ must be dict, module, or None")

        try:
            code = ModuleCode(source, filename)
            scope = ModuleScope(code, globals_dict, builtins_dict)
            self.exec_module(code.tree, scope)
        except BaseException as exc:
            # Capture rather than propagate: callers inspect RunResult.exception
            # (or call raise_for_exception) so uncaught errors don't force a raise.
            return RunResult(globals=globals_dict, exception=exc)
        return RunResult(globals=globals_dict)

    def run_or_raise(self, source: str, env: dict, filename: str = "<pynterp>") -> dict:
        """Run `source` and re-raise any uncaught exception; returns the globals."""
        result = self.run(source, env, filename)
        result.raise_for_exception()
        return result.globals

    # ----- dispatch (normal) -----

    def exec_module(self, node: ast.Module, scope: RuntimeScope) -> None:
        """Execute a parsed module body statement by statement."""
        for stmt in node.body:
            self.exec_stmt(stmt, scope)

    def exec_block(self, stmts: list[ast.stmt], scope: RuntimeScope) -> None:
        """Execute a list of statements in order within *scope*."""
        for stmt in stmts:
            self.exec_stmt(stmt, scope)

    def exec_stmt(self, node: ast.AST, scope: RuntimeScope) -> None:
        """Dispatch one statement to exec_<NodeType>; raise if unimplemented."""
        m = getattr(self, f"exec_{node.__class__.__name__}", None)
        if m is None:
            raise NotImplementedError(f"Statement not supported: {node.__class__.__name__}")
        m(node, scope)

    def eval_expr(self, node: ast.AST, scope: RuntimeScope) -> Any:
        """Dispatch one expression to eval_<NodeType> and return its value."""
        m = getattr(self, f"eval_{node.__class__.__name__}", None)
        if m is None:
            raise NotImplementedError(f"Expression not supported: {node.__class__.__name__}")
        return m(node, scope)

    # ----- dispatch (generator-mode) -----
    # These are Python generators so that `yield` in interpreted code maps to real Python yield.

    def g_exec_block(self, stmts: list[ast.stmt], scope: RuntimeScope) -> Iterator[Any]:
        """Generator-mode counterpart of exec_block; yields through each statement."""
        for stmt in stmts:
            yield from self.g_exec_stmt(stmt, scope)

    def g_exec_stmt(self, node: ast.AST, scope: RuntimeScope) -> Iterator[Any]:
        """Generator-mode statement dispatch, falling back to normal execution."""
        m = getattr(self, f"g_exec_{node.__class__.__name__}", None)
        if m is None:
            # fallback: run a non-yielding statement
            self.exec_stmt(node, scope)
            return
        yield from m(node, scope)
        if False:
            yield None  # keeps it a generator in all branches

    def g_eval_expr(self, node: ast.AST, scope: RuntimeScope) -> Iterator[Any]:
        """Generator-mode expression dispatch; the expression's value is the
        generator's return value (StopIteration.value for `yield from` callers)."""
        m = getattr(self, f"g_eval_{node.__class__.__name__}", None)
        if m is None:
            return self.eval_expr(node, scope)
        val = yield from m(node, scope)
        return val