tnfr 4.5.1__py3-none-any.whl → 4.5.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of tnfr might be problematic. Click here for more details.
- tnfr/__init__.py +91 -90
- tnfr/alias.py +546 -0
- tnfr/cache.py +578 -0
- tnfr/callback_utils.py +388 -0
- tnfr/cli/__init__.py +75 -0
- tnfr/cli/arguments.py +177 -0
- tnfr/cli/execution.py +288 -0
- tnfr/cli/utils.py +36 -0
- tnfr/collections_utils.py +300 -0
- tnfr/config.py +19 -28
- tnfr/constants/__init__.py +174 -0
- tnfr/constants/core.py +159 -0
- tnfr/constants/init.py +31 -0
- tnfr/constants/metric.py +110 -0
- tnfr/constants_glyphs.py +98 -0
- tnfr/dynamics/__init__.py +658 -0
- tnfr/dynamics/dnfr.py +733 -0
- tnfr/dynamics/integrators.py +267 -0
- tnfr/dynamics/sampling.py +31 -0
- tnfr/execution.py +201 -0
- tnfr/flatten.py +283 -0
- tnfr/gamma.py +302 -88
- tnfr/glyph_history.py +290 -0
- tnfr/grammar.py +285 -96
- tnfr/graph_utils.py +84 -0
- tnfr/helpers/__init__.py +71 -0
- tnfr/helpers/numeric.py +87 -0
- tnfr/immutable.py +178 -0
- tnfr/import_utils.py +228 -0
- tnfr/initialization.py +197 -0
- tnfr/io.py +246 -0
- tnfr/json_utils.py +162 -0
- tnfr/locking.py +37 -0
- tnfr/logging_utils.py +116 -0
- tnfr/metrics/__init__.py +41 -0
- tnfr/metrics/coherence.py +829 -0
- tnfr/metrics/common.py +151 -0
- tnfr/metrics/core.py +101 -0
- tnfr/metrics/diagnosis.py +234 -0
- tnfr/metrics/export.py +137 -0
- tnfr/metrics/glyph_timing.py +189 -0
- tnfr/metrics/reporting.py +148 -0
- tnfr/metrics/sense_index.py +120 -0
- tnfr/metrics/trig.py +181 -0
- tnfr/metrics/trig_cache.py +109 -0
- tnfr/node.py +214 -159
- tnfr/observers.py +126 -136
- tnfr/ontosim.py +134 -134
- tnfr/operators/__init__.py +420 -0
- tnfr/operators/jitter.py +203 -0
- tnfr/operators/remesh.py +485 -0
- tnfr/presets.py +46 -14
- tnfr/rng.py +254 -0
- tnfr/selector.py +210 -0
- tnfr/sense.py +284 -131
- tnfr/structural.py +207 -79
- tnfr/tokens.py +60 -0
- tnfr/trace.py +329 -94
- tnfr/types.py +43 -17
- tnfr/validators.py +70 -24
- tnfr/value_utils.py +59 -0
- tnfr-4.5.2.dist-info/METADATA +379 -0
- tnfr-4.5.2.dist-info/RECORD +67 -0
- tnfr/cli.py +0 -322
- tnfr/constants.py +0 -277
- tnfr/dynamics.py +0 -814
- tnfr/helpers.py +0 -264
- tnfr/main.py +0 -47
- tnfr/metrics.py +0 -597
- tnfr/operators.py +0 -525
- tnfr/program.py +0 -176
- tnfr/scenarios.py +0 -34
- tnfr-4.5.1.dist-info/METADATA +0 -221
- tnfr-4.5.1.dist-info/RECORD +0 -28
- {tnfr-4.5.1.dist-info → tnfr-4.5.2.dist-info}/WHEEL +0 -0
- {tnfr-4.5.1.dist-info → tnfr-4.5.2.dist-info}/entry_points.txt +0 -0
- {tnfr-4.5.1.dist-info → tnfr-4.5.2.dist-info}/licenses/LICENSE.md +0 -0
- {tnfr-4.5.1.dist-info → tnfr-4.5.2.dist-info}/top_level.txt +0 -0
tnfr/flatten.py
ADDED
|
@@ -0,0 +1,283 @@
|
|
|
1
|
+
"""Flattening utilities to compile TNFR token sequences."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from collections.abc import Collection, Iterable, Mapping, Sequence
|
|
6
|
+
from dataclasses import dataclass
|
|
7
|
+
from itertools import chain
|
|
8
|
+
from typing import Any, Callable
|
|
9
|
+
|
|
10
|
+
from .collections_utils import (
|
|
11
|
+
MAX_MATERIALIZE_DEFAULT,
|
|
12
|
+
ensure_collection,
|
|
13
|
+
flatten_structure,
|
|
14
|
+
STRING_TYPES,
|
|
15
|
+
normalize_materialize_limit,
|
|
16
|
+
)
|
|
17
|
+
from .constants_glyphs import GLYPHS_CANONICAL_SET
|
|
18
|
+
from .tokens import THOL, TARGET, WAIT, OpTag, THOL_SENTINEL, Token
|
|
19
|
+
from .types import Glyph
|
|
20
|
+
|
|
21
|
+
__all__ = [
|
|
22
|
+
"THOLEvaluator",
|
|
23
|
+
"parse_program_tokens",
|
|
24
|
+
]
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
@dataclass
class TholFrame:
    """Execution frame used to evaluate nested THOL blocks."""

    # Materialized body of the THOL block being expanded.
    seq: Sequence[Token]
    # Cursor into ``seq`` for the current pass over the body.
    index: int
    # Number of passes over ``seq`` still to run (initialized from ``repeat``).
    remaining: int
    # Glyph emitted when a pass completes, or ``None`` when no closure applies.
    closing: Glyph | None
|
|
35
|
+
|
|
36
|
+
|
|
37
|
+
def _iter_source(
    seq: Iterable[Token] | Sequence[Token] | Any,
    *,
    max_materialize: int | None,
) -> Iterable[Any]:
    """Yield items from ``seq`` enforcing ``max_materialize`` when needed.

    Strings are treated as a single token, already-materialized collections
    are passed through untouched, and open-ended iterables are probed so an
    oversized stream is rejected by :func:`ensure_collection`.
    """

    # A string is a Collection too, so it must be special-cased first.
    if isinstance(seq, STRING_TYPES):
        return (seq,)
    if isinstance(seq, Collection):
        return seq
    if not isinstance(seq, Iterable):
        raise TypeError(f"{seq!r} is not iterable")

    limit = normalize_materialize_limit(max_materialize)
    if limit is None:
        return seq
    if limit == 0:
        return ()

    source = iter(seq)

    def _probe() -> Iterable[Any]:
        # Pull at most ``limit + 1`` items: the extra one lets
        # ``ensure_collection`` detect that the stream exceeds the limit.
        for position, element in enumerate(source):
            yield element
            if position >= limit:
                break

    head = ensure_collection(_probe(), max_materialize=limit)
    if not head:
        return ()
    # Re-attach the untouched remainder of the iterator after the probe.
    return chain(head, source)
|
|
76
|
+
|
|
77
|
+
|
|
78
|
+
def _push_thol_frame(
    frames: list[TholFrame],
    item: THOL,
    *,
    max_materialize: int | None,
) -> None:
    """Validate ``item`` and append a frame for its evaluation.

    Raises :class:`ValueError` when ``repeat`` is below one or when
    ``force_close`` is neither ``None`` nor a :class:`Glyph`.
    """

    repeats = int(item.repeat)
    if repeats < 1:
        raise ValueError("repeat must be ≥1")

    force_close = item.force_close
    if force_close is not None and not isinstance(force_close, Glyph):
        raise ValueError("force_close must be a Glyph")
    # Only SHA/NUL act as closing glyphs; any other value degrades to None.
    closing = force_close if force_close in {Glyph.SHA, Glyph.NUL} else None

    body = ensure_collection(
        item.body,
        max_materialize=max_materialize,
        error_msg=f"THOL body exceeds max_materialize={max_materialize}",
    )
    frames.append(TholFrame(seq=body, index=0, remaining=repeats, closing=closing))
|
|
110
|
+
|
|
111
|
+
|
|
112
|
+
class THOLEvaluator:
    """Generator that expands a :class:`THOL` block lazily.

    Iteration yields :data:`THOL_SENTINEL` when a block (or nested block)
    opens, then the body tokens for each repetition, followed by the
    closing glyph of a pass when one is configured.
    """

    def __init__(
        self,
        item: THOL,
        *,
        max_materialize: int | None = MAX_MATERIALIZE_DEFAULT,
    ) -> None:
        self._frames: list[TholFrame] = []
        _push_thol_frame(self._frames, item, max_materialize=max_materialize)
        self._max_materialize = max_materialize
        self._started = False

    def __iter__(self) -> "THOLEvaluator":
        return self

    def __next__(self):
        # The opening sentinel is emitted exactly once, before any token.
        if not self._started:
            self._started = True
            return THOL_SENTINEL

        frames = self._frames
        while frames:
            top = frames[-1]
            if top.index < len(top.seq):
                token = top.seq[top.index]
                top.index += 1
                if not isinstance(token, THOL):
                    return token
                # Nested block: open its frame and surface its own sentinel.
                _push_thol_frame(
                    frames, token, max_materialize=self._max_materialize
                )
                return THOL_SENTINEL

            # Current pass over the body is complete.
            closing = top.closing
            top.remaining -= 1
            if top.remaining > 0:
                top.index = 0  # rewind for the next repetition
            else:
                frames.pop()
            # The closing glyph (when set) is emitted after every pass.
            if closing is not None:
                return closing
        raise StopIteration
|
|
158
|
+
|
|
159
|
+
|
|
160
|
+
def _flatten_target(
    item: TARGET,
    ops: list[tuple[OpTag, Any]],
) -> None:
    """Append ``item`` to ``ops`` as an :data:`OpTag.TARGET` operation."""

    ops.append((OpTag.TARGET, item))
|
|
165
|
+
|
|
166
|
+
|
|
167
|
+
def _flatten_wait(
    item: WAIT,
    ops: list[tuple[OpTag, Any]],
) -> None:
    """Append ``item`` to ``ops`` as an :data:`OpTag.WAIT` operation.

    The step count is clamped to a minimum of one.
    """

    raw_steps = getattr(item, "steps", 1)
    ops.append((OpTag.WAIT, max(1, int(raw_steps))))
|
|
173
|
+
|
|
174
|
+
|
|
175
|
+
def _flatten_glyph(
    item: Glyph | str,
    ops: list[tuple[OpTag, Any]],
) -> None:
    """Append ``item`` to ``ops`` as an :data:`OpTag.GLYPH` operation.

    Raises :class:`ValueError` for glyphs outside the canonical set.
    """

    if isinstance(item, Glyph):
        name = item.value
    else:
        name = str(item)
    if name not in GLYPHS_CANONICAL_SET:
        raise ValueError(f"Non-canonical glyph: {name}")
    ops.append((OpTag.GLYPH, name))
|
|
183
|
+
|
|
184
|
+
|
|
185
|
+
# Exact-type dispatch used by ``_flatten``; both ``Glyph`` and plain ``str``
# tokens route to ``_flatten_glyph``. Order matters for the isinstance
# fallback scan in ``_flatten`` when an exact-type lookup misses.
_TOKEN_DISPATCH: dict[type, Callable[[Any, list[tuple[OpTag, Any]]], None]] = {
    TARGET: _flatten_target,
    WAIT: _flatten_wait,
    Glyph: _flatten_glyph,
    str: _flatten_glyph,
}
|
|
191
|
+
|
|
192
|
+
|
|
193
|
+
def _coerce_mapping_token(
    mapping: Mapping[str, Any],
    *,
    max_materialize: int | None,
) -> Token:
    """Convert a single-key ``mapping`` into a ``WAIT``/``TARGET``/``THOL`` token.

    Raises :class:`ValueError` for mappings that are not single-key or use an
    unknown key, and :class:`TypeError` for malformed ``THOL`` specifications.
    """

    if len(mapping) != 1:
        raise ValueError(f"Invalid token mapping: {mapping!r}")
    ((key, value),) = mapping.items()

    if key == "WAIT":
        return WAIT(int(value))
    if key == "TARGET":
        return TARGET(value)
    if key != "THOL":
        raise ValueError(f"Unrecognized token: {key!r}")
    if not isinstance(value, Mapping):
        raise TypeError("THOL specification must be a mapping")

    close = value.get("close")
    if isinstance(close, str):
        # Resolve by member *name* (e.g. "SHA"), not by glyph value.
        resolved = Glyph.__members__.get(close)
        if resolved is None:
            raise ValueError(f"Glyph de cierre desconocido: {close!r}")
        close = resolved
    elif close is not None and not isinstance(close, Glyph):
        raise TypeError("THOL close glyph must be a Glyph or string name")

    body = parse_program_tokens(
        value.get("body", []), max_materialize=max_materialize
    )
    return THOL(
        body=body,
        repeat=int(value.get("repeat", 1)),
        force_close=close,
    )
|
|
222
|
+
|
|
223
|
+
|
|
224
|
+
def parse_program_tokens(
    obj: Iterable[Any] | Sequence[Any] | Any,
    *,
    max_materialize: int | None = MAX_MATERIALIZE_DEFAULT,
) -> list[Token]:
    """Materialize ``obj`` into a list of canonical tokens.

    The function accepts the same iterables handled by :func:`_flatten`,
    including dictionaries describing ``WAIT``, ``TARGET`` and ``THOL`` tokens.
    Nested iterables are flattened following :func:`flatten_structure` rules.

    Raises :class:`TypeError` for items that are not recognized tokens.
    """

    source = _iter_source(obj, max_materialize=max_materialize)

    def _expand(candidate: Any):
        # Mappings are coerced into proper token objects; everything else
        # is left for flatten_structure's default handling.
        if not isinstance(candidate, Mapping):
            return None
        return (_coerce_mapping_token(candidate, max_materialize=max_materialize),)

    result: list[Token] = []
    for token in flatten_structure(source, expand=_expand):
        if not isinstance(token, (Glyph, WAIT, TARGET, THOL, str)):
            raise TypeError(f"Unsupported token: {token!r}")
        result.append(token)
    return result
|
|
250
|
+
|
|
251
|
+
|
|
252
|
+
def _flatten(
    seq: Iterable[Token] | Sequence[Token] | Any,
    *,
    max_materialize: int | None = MAX_MATERIALIZE_DEFAULT,
) -> list[tuple[OpTag, Any]]:
    """Return a list of operations ``(op, payload)`` where ``op`` ∈ :class:`OpTag`."""

    ops: list[tuple[OpTag, Any]] = []
    source = _iter_source(seq, max_materialize=max_materialize)

    def _expand(candidate: Any):
        # THOL blocks expand lazily; mappings are coerced into tokens first.
        if isinstance(candidate, THOL):
            return THOLEvaluator(candidate, max_materialize=max_materialize)
        if isinstance(candidate, Mapping):
            return (_coerce_mapping_token(candidate, max_materialize=max_materialize),)
        return None

    for item in flatten_structure(source, expand=_expand):
        if item is THOL_SENTINEL:
            ops.append((OpTag.THOL, Glyph.THOL.value))
            continue
        handler = _TOKEN_DISPATCH.get(type(item))
        if handler is None:
            # Exact-type miss: fall back to an isinstance scan so subclasses
            # of the registered token types are still dispatched.
            handler = next(
                (
                    candidate
                    for cls, candidate in _TOKEN_DISPATCH.items()
                    if isinstance(item, cls)
                ),
                None,
            )
        if handler is None:
            raise TypeError(f"Unsupported token: {item!r}")
        handler(item, ops)
    return ops
|