tnfr 4.5.1__py3-none-any.whl → 6.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (170) hide show
  1. tnfr/__init__.py +270 -90
  2. tnfr/__init__.pyi +40 -0
  3. tnfr/_compat.py +11 -0
  4. tnfr/_version.py +7 -0
  5. tnfr/_version.pyi +7 -0
  6. tnfr/alias.py +631 -0
  7. tnfr/alias.pyi +140 -0
  8. tnfr/cache.py +732 -0
  9. tnfr/cache.pyi +232 -0
  10. tnfr/callback_utils.py +381 -0
  11. tnfr/callback_utils.pyi +105 -0
  12. tnfr/cli/__init__.py +89 -0
  13. tnfr/cli/__init__.pyi +47 -0
  14. tnfr/cli/arguments.py +199 -0
  15. tnfr/cli/arguments.pyi +33 -0
  16. tnfr/cli/execution.py +322 -0
  17. tnfr/cli/execution.pyi +80 -0
  18. tnfr/cli/utils.py +34 -0
  19. tnfr/cli/utils.pyi +8 -0
  20. tnfr/config/__init__.py +12 -0
  21. tnfr/config/__init__.pyi +8 -0
  22. tnfr/config/constants.py +104 -0
  23. tnfr/config/constants.pyi +12 -0
  24. tnfr/config/init.py +36 -0
  25. tnfr/config/init.pyi +8 -0
  26. tnfr/config/operator_names.py +106 -0
  27. tnfr/config/operator_names.pyi +28 -0
  28. tnfr/config/presets.py +104 -0
  29. tnfr/config/presets.pyi +7 -0
  30. tnfr/constants/__init__.py +228 -0
  31. tnfr/constants/__init__.pyi +104 -0
  32. tnfr/constants/core.py +158 -0
  33. tnfr/constants/core.pyi +17 -0
  34. tnfr/constants/init.py +31 -0
  35. tnfr/constants/init.pyi +12 -0
  36. tnfr/constants/metric.py +102 -0
  37. tnfr/constants/metric.pyi +19 -0
  38. tnfr/constants_glyphs.py +16 -0
  39. tnfr/constants_glyphs.pyi +12 -0
  40. tnfr/dynamics/__init__.py +136 -0
  41. tnfr/dynamics/__init__.pyi +83 -0
  42. tnfr/dynamics/adaptation.py +201 -0
  43. tnfr/dynamics/aliases.py +22 -0
  44. tnfr/dynamics/coordination.py +343 -0
  45. tnfr/dynamics/dnfr.py +2315 -0
  46. tnfr/dynamics/dnfr.pyi +33 -0
  47. tnfr/dynamics/integrators.py +561 -0
  48. tnfr/dynamics/integrators.pyi +35 -0
  49. tnfr/dynamics/runtime.py +521 -0
  50. tnfr/dynamics/sampling.py +34 -0
  51. tnfr/dynamics/sampling.pyi +7 -0
  52. tnfr/dynamics/selectors.py +680 -0
  53. tnfr/execution.py +216 -0
  54. tnfr/execution.pyi +65 -0
  55. tnfr/flatten.py +283 -0
  56. tnfr/flatten.pyi +28 -0
  57. tnfr/gamma.py +320 -89
  58. tnfr/gamma.pyi +40 -0
  59. tnfr/glyph_history.py +337 -0
  60. tnfr/glyph_history.pyi +53 -0
  61. tnfr/grammar.py +23 -153
  62. tnfr/grammar.pyi +13 -0
  63. tnfr/helpers/__init__.py +151 -0
  64. tnfr/helpers/__init__.pyi +66 -0
  65. tnfr/helpers/numeric.py +88 -0
  66. tnfr/helpers/numeric.pyi +12 -0
  67. tnfr/immutable.py +214 -0
  68. tnfr/immutable.pyi +37 -0
  69. tnfr/initialization.py +199 -0
  70. tnfr/initialization.pyi +73 -0
  71. tnfr/io.py +311 -0
  72. tnfr/io.pyi +11 -0
  73. tnfr/locking.py +37 -0
  74. tnfr/locking.pyi +7 -0
  75. tnfr/metrics/__init__.py +41 -0
  76. tnfr/metrics/__init__.pyi +20 -0
  77. tnfr/metrics/coherence.py +1469 -0
  78. tnfr/metrics/common.py +149 -0
  79. tnfr/metrics/common.pyi +15 -0
  80. tnfr/metrics/core.py +259 -0
  81. tnfr/metrics/core.pyi +13 -0
  82. tnfr/metrics/diagnosis.py +840 -0
  83. tnfr/metrics/diagnosis.pyi +89 -0
  84. tnfr/metrics/export.py +151 -0
  85. tnfr/metrics/glyph_timing.py +369 -0
  86. tnfr/metrics/reporting.py +152 -0
  87. tnfr/metrics/reporting.pyi +12 -0
  88. tnfr/metrics/sense_index.py +294 -0
  89. tnfr/metrics/sense_index.pyi +9 -0
  90. tnfr/metrics/trig.py +216 -0
  91. tnfr/metrics/trig.pyi +12 -0
  92. tnfr/metrics/trig_cache.py +105 -0
  93. tnfr/metrics/trig_cache.pyi +10 -0
  94. tnfr/node.py +255 -177
  95. tnfr/node.pyi +161 -0
  96. tnfr/observers.py +154 -150
  97. tnfr/observers.pyi +46 -0
  98. tnfr/ontosim.py +135 -134
  99. tnfr/ontosim.pyi +33 -0
  100. tnfr/operators/__init__.py +452 -0
  101. tnfr/operators/__init__.pyi +31 -0
  102. tnfr/operators/definitions.py +181 -0
  103. tnfr/operators/definitions.pyi +92 -0
  104. tnfr/operators/jitter.py +266 -0
  105. tnfr/operators/jitter.pyi +11 -0
  106. tnfr/operators/registry.py +80 -0
  107. tnfr/operators/registry.pyi +15 -0
  108. tnfr/operators/remesh.py +569 -0
  109. tnfr/presets.py +10 -23
  110. tnfr/presets.pyi +7 -0
  111. tnfr/py.typed +0 -0
  112. tnfr/rng.py +440 -0
  113. tnfr/rng.pyi +14 -0
  114. tnfr/selector.py +217 -0
  115. tnfr/selector.pyi +19 -0
  116. tnfr/sense.py +307 -142
  117. tnfr/sense.pyi +30 -0
  118. tnfr/structural.py +69 -164
  119. tnfr/structural.pyi +46 -0
  120. tnfr/telemetry/__init__.py +13 -0
  121. tnfr/telemetry/verbosity.py +37 -0
  122. tnfr/tokens.py +61 -0
  123. tnfr/tokens.pyi +41 -0
  124. tnfr/trace.py +520 -95
  125. tnfr/trace.pyi +68 -0
  126. tnfr/types.py +382 -17
  127. tnfr/types.pyi +145 -0
  128. tnfr/utils/__init__.py +158 -0
  129. tnfr/utils/__init__.pyi +133 -0
  130. tnfr/utils/cache.py +755 -0
  131. tnfr/utils/cache.pyi +156 -0
  132. tnfr/utils/data.py +267 -0
  133. tnfr/utils/data.pyi +73 -0
  134. tnfr/utils/graph.py +87 -0
  135. tnfr/utils/graph.pyi +10 -0
  136. tnfr/utils/init.py +746 -0
  137. tnfr/utils/init.pyi +85 -0
  138. tnfr/utils/io.py +157 -0
  139. tnfr/utils/io.pyi +10 -0
  140. tnfr/utils/validators.py +130 -0
  141. tnfr/utils/validators.pyi +19 -0
  142. tnfr/validation/__init__.py +25 -0
  143. tnfr/validation/__init__.pyi +17 -0
  144. tnfr/validation/compatibility.py +59 -0
  145. tnfr/validation/compatibility.pyi +8 -0
  146. tnfr/validation/grammar.py +149 -0
  147. tnfr/validation/grammar.pyi +11 -0
  148. tnfr/validation/rules.py +194 -0
  149. tnfr/validation/rules.pyi +18 -0
  150. tnfr/validation/syntax.py +151 -0
  151. tnfr/validation/syntax.pyi +7 -0
  152. tnfr-6.0.0.dist-info/METADATA +135 -0
  153. tnfr-6.0.0.dist-info/RECORD +157 -0
  154. tnfr/cli.py +0 -322
  155. tnfr/config.py +0 -41
  156. tnfr/constants.py +0 -277
  157. tnfr/dynamics.py +0 -814
  158. tnfr/helpers.py +0 -264
  159. tnfr/main.py +0 -47
  160. tnfr/metrics.py +0 -597
  161. tnfr/operators.py +0 -525
  162. tnfr/program.py +0 -176
  163. tnfr/scenarios.py +0 -34
  164. tnfr/validators.py +0 -38
  165. tnfr-4.5.1.dist-info/METADATA +0 -221
  166. tnfr-4.5.1.dist-info/RECORD +0 -28
  167. {tnfr-4.5.1.dist-info → tnfr-6.0.0.dist-info}/WHEEL +0 -0
  168. {tnfr-4.5.1.dist-info → tnfr-6.0.0.dist-info}/entry_points.txt +0 -0
  169. {tnfr-4.5.1.dist-info → tnfr-6.0.0.dist-info}/licenses/LICENSE.md +0 -0
  170. {tnfr-4.5.1.dist-info → tnfr-6.0.0.dist-info}/top_level.txt +0 -0
tnfr/execution.py ADDED
@@ -0,0 +1,216 @@
1
+ """Execution helpers for canonical TNFR programs."""
2
+
3
+ from __future__ import annotations
4
+
5
+ from collections import deque
6
+ from collections.abc import Callable, Iterable, Sequence
7
+ from typing import Any, Optional, cast
8
+
9
+ from ._compat import TypeAlias
10
+
11
+ from .utils import MAX_MATERIALIZE_DEFAULT, ensure_collection, is_non_string_sequence
12
+ from .constants import get_param
13
+ from .dynamics import step
14
+ from .flatten import _flatten
15
+ from .glyph_history import ensure_history
16
+ from .validation.grammar import apply_glyph_with_grammar
17
+ from .tokens import OpTag, TARGET, THOL, WAIT, Token
18
+ from .types import Glyph, NodeId, TNFRGraph
19
+
20
# Callback that advances the simulation by one step on the given graph.
AdvanceFn = Callable[[TNFRGraph], None]
# Single program-trace record: timestamp ``t``, op name and op-specific data.
TraceEntry = dict[str, Any]
# Bounded history of trace entries (see ``play`` for how the bound is chosen).
ProgramTrace: TypeAlias = deque[TraceEntry]
# Signature shared by all token handlers: graph, token payload, current node
# targets, trace and advance callback; returns the (possibly updated) targets.
HandlerFn = Callable[
    [TNFRGraph, Any, Optional[Sequence[NodeId]], ProgramTrace, AdvanceFn],
    Optional[Sequence[NodeId]],
]
27
+
28
+ __all__ = [
29
+ "AdvanceFn",
30
+ "CANONICAL_PRESET_NAME",
31
+ "CANONICAL_PROGRAM_TOKENS",
32
+ "HANDLERS",
33
+ "_apply_glyph_to_targets",
34
+ "_record_trace",
35
+ "compile_sequence",
36
+ "basic_canonical_example",
37
+ "block",
38
+ "play",
39
+ "seq",
40
+ "target",
41
+ "wait",
42
+ ]
43
+
44
+
45
# Preset name for the reference program (kept aligned with
# tnfr.config.presets per basic_canonical_example's docstring).
CANONICAL_PRESET_NAME = "canonical_example"
# Token sequence of the reference canonical program.
CANONICAL_PROGRAM_TOKENS: tuple[Token, ...] = (
    Glyph.SHA,
    Glyph.AL,
    Glyph.RA,
    Glyph.ZHIR,
    Glyph.NUL,
    Glyph.THOL,
)
54
+
55
+
56
def _window(G: TNFRGraph) -> int:
    """Return the configured glyph hysteresis window for ``G`` as an int."""

    window = get_param(G, "GLYPH_HYSTERESIS_WINDOW")
    return int(window)
58
+
59
+
60
def _apply_glyph_to_targets(
    G: TNFRGraph, g: Glyph | str, nodes: Optional[Iterable[NodeId]] = None
) -> None:
    """Apply ``g`` to ``nodes`` (or all nodes) respecting the grammar."""

    targets = G.nodes() if nodes is None else nodes
    window = _window(G)
    apply_glyph_with_grammar(G, targets, g, window)
68
+
69
+
70
def _advance(G: TNFRGraph, step_fn: AdvanceFn) -> None:
    # Thin indirection so every handler advances the simulation the same way.
    step_fn(G)
72
+
73
+
74
def _record_trace(trace: ProgramTrace, G: TNFRGraph, op: OpTag, **data: Any) -> None:
    """Append one trace entry for ``op`` stamped with graph time ``_t``."""

    entry = {"t": float(G.graph.get("_t", 0.0)), "op": op.name}
    entry.update(data)
    trace.append(entry)
76
+
77
+
78
def _advance_and_record(
    G: TNFRGraph,
    trace: ProgramTrace,
    label: OpTag,
    step_fn: AdvanceFn,
    *,
    times: int = 1,
    **data: Any,
) -> None:
    """Advance the simulation ``times`` steps, then record one ``label`` entry."""

    for _ in range(times):
        _advance(G, step_fn)
    # One entry per operation (not per step); ``data`` carries op specifics.
    _record_trace(trace, G, label, **data)
90
+
91
+
92
def _handle_target(
    G: TNFRGraph,
    payload: TARGET,
    _curr_target: Optional[Sequence[NodeId]],
    trace: ProgramTrace,
    _step_fn: AdvanceFn,
) -> Sequence[NodeId]:
    """Handle a ``TARGET`` token and return the active node set."""

    source = payload.nodes if payload.nodes is not None else G.nodes()
    collected = ensure_collection(source, max_materialize=None)
    # Reuse sequence-like collections as-is; otherwise materialize a tuple.
    if is_non_string_sequence(collected):
        selection = cast(Sequence[NodeId], collected)
    else:
        selection = tuple(collected)
    _record_trace(trace, G, OpTag.TARGET, n=len(selection))
    return selection
109
+
110
+
111
def _handle_wait(
    G: TNFRGraph,
    steps: int,
    curr_target: Optional[Sequence[NodeId]],
    trace: ProgramTrace,
    step_fn: AdvanceFn,
) -> Optional[Sequence[NodeId]]:
    # Advance ``steps`` simulation steps and log a single WAIT entry (k=steps);
    # the active target selection is left untouched.
    _advance_and_record(G, trace, OpTag.WAIT, step_fn, times=steps, k=steps)
    return curr_target
120
+
121
+
122
def _handle_glyph(
    G: TNFRGraph,
    g: Glyph | str,
    curr_target: Optional[Sequence[NodeId]],
    trace: ProgramTrace,
    step_fn: AdvanceFn,
    label: OpTag = OpTag.GLYPH,
) -> Optional[Sequence[NodeId]]:
    """Apply glyph ``g`` to the current targets, advance one step and trace it."""

    _apply_glyph_to_targets(G, g, curr_target)
    _advance_and_record(G, trace, label, step_fn, g=g)
    return curr_target
133
+
134
+
135
def _handle_thol(
    G: TNFRGraph,
    g: Glyph | str | None,
    curr_target: Optional[Sequence[NodeId]],
    trace: ProgramTrace,
    step_fn: AdvanceFn,
) -> Optional[Sequence[NodeId]]:
    # A THOL opening is applied like a glyph (falling back to the THOL glyph
    # itself when no explicit glyph is given), but traced under OpTag.THOL.
    return _handle_glyph(
        G, g or Glyph.THOL.value, curr_target, trace, step_fn, label=OpTag.THOL
    )
145
+
146
+
147
# Dispatch table mapping each operation tag to its handler; ``play`` resolves
# ops here, so new token kinds can be wired in without touching its loop.
HANDLERS: dict[OpTag, HandlerFn] = {
    OpTag.TARGET: _handle_target,
    OpTag.WAIT: _handle_wait,
    OpTag.GLYPH: _handle_glyph,
    OpTag.THOL: _handle_thol,
}
153
+
154
+
155
def play(
    G: TNFRGraph, sequence: Sequence[Token], step_fn: Optional[AdvanceFn] = None
) -> None:
    """Execute a canonical sequence on graph ``G``.

    Each flattened operation is dispatched through :data:`HANDLERS` and
    recorded in the ``program_trace`` history deque; ``step_fn`` defaults to
    :func:`tnfr.dynamics.step`.
    """

    advance: AdvanceFn = step_fn if step_fn is not None else step

    history = ensure_history(G)
    maxlen = int(get_param(G, "PROGRAM_TRACE_MAXLEN"))
    existing = history.get("program_trace")
    trace: ProgramTrace
    if isinstance(existing, deque) and existing.maxlen == maxlen:
        trace = cast(ProgramTrace, existing)
    else:
        # Rebuild the deque whenever it is missing or its bound no longer
        # matches the configured maximum, keeping any prior entries.
        trace = cast(ProgramTrace, deque(existing or [], maxlen=maxlen))
        history["program_trace"] = trace

    active: Optional[Sequence[NodeId]] = None
    for op, payload in _flatten(sequence):
        handler = HANDLERS.get(op)
        if handler is None:
            raise ValueError(f"Unknown operation: {op}")
        active = handler(G, payload, active, trace, advance)
179
+
180
+
181
def compile_sequence(
    sequence: Iterable[Token] | Sequence[Token] | Any,
    *,
    max_materialize: int | None = MAX_MATERIALIZE_DEFAULT,
) -> list[tuple[OpTag, Any]]:
    """Return the operations executed by :func:`play` for ``sequence``.

    Thin public wrapper over :func:`tnfr.flatten._flatten`; ``max_materialize``
    bounds how many items of a lazy ``sequence`` may be buffered.
    """

    return _flatten(sequence, max_materialize=max_materialize)
189
+
190
+
191
def seq(*tokens: Token) -> list[Token]:
    """Collect ``tokens`` into a list program."""

    return [*tokens]
193
+
194
+
195
def block(
    *tokens: Token, repeat: int = 1, close: Optional[Glyph] = None
) -> THOL:
    """Wrap ``tokens`` in a THOL block with optional ``repeat`` and closer."""

    body = list(tokens)
    return THOL(body=body, repeat=repeat, force_close=close)
199
+
200
+
201
def target(nodes: Optional[Iterable[NodeId]] = None) -> TARGET:
    """Build a ``TARGET`` token; ``None`` selects every node of the graph."""

    return TARGET(nodes=nodes)
203
+
204
+
205
def wait(steps: int = 1) -> WAIT:
    """Build a ``WAIT`` token; step counts below one are clamped to one."""

    count = int(steps)
    if count < 1:
        count = 1
    return WAIT(steps=count)
207
+
208
+
209
def basic_canonical_example() -> list[Token]:
    """Reference canonical sequence.

    Returns a copy of the canonical preset tokens to keep CLI defaults aligned
    with :func:`tnfr.config.presets.get_preset`.
    """

    # Fresh list each call so callers can mutate their copy freely.
    return [*CANONICAL_PROGRAM_TOKENS]
tnfr/execution.pyi ADDED
@@ -0,0 +1,65 @@
1
# Typing stubs for ``tnfr.execution``; runtime implementations live in
# execution.py.  Only signatures and public aliases are declared here.
from __future__ import annotations

from collections import deque
from collections.abc import Callable, Iterable, Sequence
from typing import Any, Optional

from ._compat import TypeAlias

from .tokens import OpTag, TARGET, THOL, WAIT, Token
from .types import Glyph, NodeId, TNFRGraph

__all__: list[str]


def __getattr__(name: str) -> Any: ...


AdvanceFn = Callable[[TNFRGraph], None]
TraceEntry = dict[str, Any]
ProgramTrace: TypeAlias = deque[TraceEntry]
HandlerFn = Callable[
    [TNFRGraph, Any, Sequence[NodeId] | None, ProgramTrace, AdvanceFn],
    Sequence[NodeId] | None,
]

CANONICAL_PRESET_NAME: str
CANONICAL_PROGRAM_TOKENS: tuple[Token, ...]
HANDLERS: dict[OpTag, HandlerFn]


def _apply_glyph_to_targets(
    G: TNFRGraph, g: Glyph | str, nodes: Iterable[NodeId] | None = ...
) -> None: ...


def _record_trace(trace: ProgramTrace, G: TNFRGraph, op: OpTag, **data: Any) -> None: ...


def compile_sequence(
    sequence: Iterable[Token] | Sequence[Token] | Any,
    *,
    max_materialize: int | None = ...,
) -> list[tuple[OpTag, Any]]: ...


def play(
    G: TNFRGraph, sequence: Sequence[Token], step_fn: Optional[AdvanceFn] = ...
) -> None: ...


def seq(*tokens: Token) -> list[Token]: ...


def block(
    *tokens: Token, repeat: int = ..., close: Glyph | None = ...
) -> THOL: ...


def target(nodes: Iterable[NodeId] | None = ...) -> TARGET: ...


def wait(steps: int = ...) -> WAIT: ...


def basic_canonical_example() -> list[Token]: ...
tnfr/flatten.py ADDED
@@ -0,0 +1,283 @@
1
+ """Flattening utilities to compile TNFR token sequences."""
2
+
3
+ from __future__ import annotations
4
+
5
+ from collections.abc import Collection, Iterable, Mapping, Sequence
6
+ from dataclasses import dataclass
7
+ from itertools import chain
8
+ from typing import Any, Callable
9
+
10
+ from .utils import (
11
+ MAX_MATERIALIZE_DEFAULT,
12
+ STRING_TYPES,
13
+ ensure_collection,
14
+ flatten_structure,
15
+ normalize_materialize_limit,
16
+ )
17
+ from .config.constants import GLYPHS_CANONICAL_SET
18
+ from .tokens import THOL, TARGET, WAIT, OpTag, THOL_SENTINEL, Token
19
+ from .types import Glyph
20
+
21
+ __all__ = [
22
+ "THOLEvaluator",
23
+ "parse_program_tokens",
24
+ ]
25
+
26
+
27
@dataclass
class TholFrame:
    """Execution frame used to evaluate nested THOL blocks."""

    seq: Sequence[Token]  # materialized body tokens of the block
    index: int  # position of the next token to emit within ``seq``
    remaining: int  # body repetitions still to run (counts down to zero)
    closing: Glyph | None  # glyph THOLEvaluator emits after each completed pass
35
+
36
+
37
def _iter_source(
    seq: Iterable[Token] | Sequence[Token] | Any,
    *,
    max_materialize: int | None,
) -> Iterable[Any]:
    """Yield items from ``seq`` enforcing ``max_materialize`` when needed."""

    # Already-materialized containers pass through unchanged: their size is
    # known, so no buffering limit needs to be enforced here.
    if isinstance(seq, Collection) and not isinstance(seq, STRING_TYPES):
        return seq

    # A bare string is a single token, not an iterable of characters.
    if isinstance(seq, STRING_TYPES):
        return (seq,)

    if not isinstance(seq, Iterable):
        raise TypeError(f"{seq!r} is not iterable")

    limit = normalize_materialize_limit(max_materialize)
    if limit is None:
        return seq
    if limit == 0:
        return ()

    iterator = iter(seq)

    def _preview() -> Iterable[Any]:
        # Pulls at most ``limit + 1`` items (the break fires after yielding
        # index ``limit``) so ensure_collection below can detect an oversized
        # source via its own max_materialize check.
        for idx, item in enumerate(iterator):
            yield item
            if idx >= limit:
                break

    preview = ensure_collection(
        _preview(),
        max_materialize=limit,
    )

    if not preview:
        return ()

    # Chain the buffered prefix back onto the (possibly exhausted) iterator.
    return chain(preview, iterator)
76
+
77
+
78
def _push_thol_frame(
    frames: list[TholFrame],
    item: THOL,
    *,
    max_materialize: int | None,
) -> None:
    """Validate ``item`` and append a frame for its evaluation."""

    repeats = int(item.repeat)
    if repeats < 1:
        raise ValueError("repeat must be ≥1")
    force_close = item.force_close
    if force_close is not None and not isinstance(force_close, Glyph):
        raise ValueError("force_close must be a Glyph")
    # Only SHA/NUL act as closing glyphs; any other value is ignored.
    closing = force_close if force_close in {Glyph.SHA, Glyph.NUL} else None
    body = ensure_collection(
        item.body,
        max_materialize=max_materialize,
        error_msg=f"THOL body exceeds max_materialize={max_materialize}",
    )
    frames.append(
        TholFrame(seq=body, index=0, remaining=repeats, closing=closing)
    )
110
+
111
+
112
class THOLEvaluator:
    """Generator that expands a :class:`THOL` block lazily."""

    def __init__(
        self,
        item: THOL,
        *,
        max_materialize: int | None = MAX_MATERIALIZE_DEFAULT,
    ) -> None:
        # Stack of frames; nested THOL blocks push additional frames.
        self._frames: list[TholFrame] = []
        _push_thol_frame(self._frames, item, max_materialize=max_materialize)
        self._max_materialize = max_materialize
        self._started = False

    def __iter__(self) -> "THOLEvaluator":
        return self

    def __next__(self) -> Token | object:
        # The very first yield is the sentinel marking the block opening.
        if not self._started:
            self._started = True
            return THOL_SENTINEL
        while self._frames:
            frame = self._frames[-1]
            seq = frame.seq
            idx = frame.index
            if idx < len(seq):
                token = seq[idx]
                frame.index = idx + 1
                if isinstance(token, THOL):
                    # Nested block: push its frame and emit an opening sentinel.
                    _push_thol_frame(
                        self._frames,
                        token,
                        max_materialize=self._max_materialize,
                    )
                    return THOL_SENTINEL
                return token
            else:
                # Body pass finished: rewind for another repeat or pop the
                # frame; the forced closing glyph (if any) is emitted once per
                # completed pass.
                cl = frame.closing
                frame.remaining -= 1
                if frame.remaining > 0:
                    frame.index = 0
                else:
                    self._frames.pop()
                if cl is not None:
                    return cl
        raise StopIteration
158
+
159
+
160
def _flatten_target(
    item: TARGET,
    ops: list[tuple[OpTag, Any]],
) -> None:
    # TARGET tokens are forwarded verbatim; the handler resolves node sets.
    ops.append((OpTag.TARGET, item))
165
+
166
+
167
def _flatten_wait(
    item: WAIT,
    ops: list[tuple[OpTag, Any]],
) -> None:
    """Emit a WAIT op, clamping the step count to at least one."""

    steps = int(getattr(item, "steps", 1))
    if steps < 1:
        steps = 1
    ops.append((OpTag.WAIT, steps))
173
+
174
+
175
def _flatten_glyph(
    item: Glyph | str,
    ops: list[tuple[OpTag, Any]],
) -> None:
    """Emit a GLYPH op after checking the glyph string is canonical."""

    if isinstance(item, Glyph):
        name = item.value
    else:
        name = str(item)
    if name not in GLYPHS_CANONICAL_SET:
        raise ValueError(f"Non-canonical glyph: {name}")
    ops.append((OpTag.GLYPH, name))
183
+
184
+
185
# Type-keyed dispatch used by ``_flatten``: exact-type hits are O(1); an
# isinstance fallback inside ``_flatten`` covers subclasses of these keys.
_TOKEN_DISPATCH: dict[type, Callable[[Any, list[tuple[OpTag, Any]]], None]] = {
    TARGET: _flatten_target,
    WAIT: _flatten_wait,
    Glyph: _flatten_glyph,
    str: _flatten_glyph,
}
191
+
192
+
193
def _coerce_mapping_token(
    mapping: Mapping[str, Any],
    *,
    max_materialize: int | None,
) -> Token:
    """Convert a one-entry ``WAIT``/``TARGET``/``THOL`` mapping into a token."""

    if len(mapping) != 1:
        raise ValueError(f"Invalid token mapping: {mapping!r}")
    key, value = next(iter(mapping.items()))
    if key == "WAIT":
        return WAIT(int(value))
    if key == "TARGET":
        return TARGET(value)
    if key != "THOL":
        raise ValueError(f"Unrecognized token: {key!r}")
    if not isinstance(value, Mapping):
        raise TypeError("THOL specification must be a mapping")

    # Resolve the optional closing glyph: name strings look up Glyph members;
    # Glyph instances pass through; anything else is rejected.
    close = value.get("close")
    if close is None:
        resolved_close: Glyph | None = None
    elif isinstance(close, str):
        resolved_close = Glyph.__members__.get(close)
        if resolved_close is None:
            raise ValueError(f"Unknown closing glyph: {close!r}")
    elif isinstance(close, Glyph):
        resolved_close = close
    else:
        raise TypeError("THOL close glyph must be a Glyph or string name")

    body = parse_program_tokens(value.get("body", []), max_materialize=max_materialize)
    repeat = int(value.get("repeat", 1))
    return THOL(body=body, repeat=repeat, force_close=resolved_close)
222
+
223
+
224
def parse_program_tokens(
    obj: Iterable[Any] | Sequence[Any] | Any,
    *,
    max_materialize: int | None = MAX_MATERIALIZE_DEFAULT,
) -> list[Token]:
    """Materialize ``obj`` into a list of canonical tokens.

    The function accepts the same iterables handled by :func:`_flatten`,
    including dictionaries describing ``WAIT``, ``TARGET`` and ``THOL`` tokens.
    Nested iterables are flattened following :func:`flatten_structure` rules.
    """

    source = _iter_source(obj, max_materialize=max_materialize)

    def _expand_mapping(item: Any) -> Iterable[Any] | None:
        # Mappings describe a single token; everything else flattens normally.
        if not isinstance(item, Mapping):
            return None
        return (_coerce_mapping_token(item, max_materialize=max_materialize),)

    result: list[Token] = []
    for candidate in flatten_structure(source, expand=_expand_mapping):
        if not isinstance(candidate, (Glyph, WAIT, TARGET, THOL, str)):
            raise TypeError(f"Unsupported token: {candidate!r}")
        result.append(candidate)
    return result
250
+
251
+
252
def _flatten(
    seq: Iterable[Token] | Sequence[Token] | Any,
    *,
    max_materialize: int | None = MAX_MATERIALIZE_DEFAULT,
) -> list[tuple[OpTag, Any]]:
    """Return a list of operations ``(op, payload)`` where ``op`` ∈ :class:`OpTag`."""

    ops: list[tuple[OpTag, Any]] = []
    source = _iter_source(seq, max_materialize=max_materialize)

    def _expand(item: Any) -> Iterable[Any] | None:
        # THOL blocks expand lazily; mappings coerce into a single token.
        if isinstance(item, THOL):
            return THOLEvaluator(item, max_materialize=max_materialize)
        if isinstance(item, Mapping):
            return (_coerce_mapping_token(item, max_materialize=max_materialize),)
        return None

    for item in flatten_structure(source, expand=_expand):
        if item is THOL_SENTINEL:
            # Block-opening marker: record the THOL glyph application.
            ops.append((OpTag.THOL, Glyph.THOL.value))
            continue
        handler = _TOKEN_DISPATCH.get(type(item))
        if handler is None:
            # Fall back to isinstance checks so subclasses dispatch too.
            handler = next(
                (fn for cls, fn in _TOKEN_DISPATCH.items() if isinstance(item, cls)),
                None,
            )
        if handler is None:
            raise TypeError(f"Unsupported token: {item!r}")
        handler(item, ops)
    return ops
tnfr/flatten.pyi ADDED
@@ -0,0 +1,28 @@
1
# Typing stubs for ``tnfr.flatten``; runtime implementations live in
# flatten.py.  Only the public evaluator and parser are declared here.
from __future__ import annotations

from collections.abc import Iterable, Iterator, Sequence
from typing import Any

from .tokens import THOL, Token

__all__: list[str]


def __getattr__(name: str) -> Any: ...


class THOLEvaluator(Iterator[Token | object]):
    def __init__(
        self, item: THOL, *, max_materialize: int | None = ...
    ) -> None: ...

    def __iter__(self) -> THOLEvaluator: ...

    def __next__(self) -> Token | object: ...


def parse_program_tokens(
    obj: Iterable[Any] | Sequence[Any] | Any,
    *,
    max_materialize: int | None = ...,
) -> list[Token]: ...