tnfr 4.5.1__py3-none-any.whl → 4.5.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of tnfr might be problematic. Click here for more details.

Files changed (78) hide show
  1. tnfr/__init__.py +91 -90
  2. tnfr/alias.py +546 -0
  3. tnfr/cache.py +578 -0
  4. tnfr/callback_utils.py +388 -0
  5. tnfr/cli/__init__.py +75 -0
  6. tnfr/cli/arguments.py +177 -0
  7. tnfr/cli/execution.py +288 -0
  8. tnfr/cli/utils.py +36 -0
  9. tnfr/collections_utils.py +300 -0
  10. tnfr/config.py +19 -28
  11. tnfr/constants/__init__.py +174 -0
  12. tnfr/constants/core.py +159 -0
  13. tnfr/constants/init.py +31 -0
  14. tnfr/constants/metric.py +110 -0
  15. tnfr/constants_glyphs.py +98 -0
  16. tnfr/dynamics/__init__.py +658 -0
  17. tnfr/dynamics/dnfr.py +733 -0
  18. tnfr/dynamics/integrators.py +267 -0
  19. tnfr/dynamics/sampling.py +31 -0
  20. tnfr/execution.py +201 -0
  21. tnfr/flatten.py +283 -0
  22. tnfr/gamma.py +302 -88
  23. tnfr/glyph_history.py +290 -0
  24. tnfr/grammar.py +285 -96
  25. tnfr/graph_utils.py +84 -0
  26. tnfr/helpers/__init__.py +71 -0
  27. tnfr/helpers/numeric.py +87 -0
  28. tnfr/immutable.py +178 -0
  29. tnfr/import_utils.py +228 -0
  30. tnfr/initialization.py +197 -0
  31. tnfr/io.py +246 -0
  32. tnfr/json_utils.py +162 -0
  33. tnfr/locking.py +37 -0
  34. tnfr/logging_utils.py +116 -0
  35. tnfr/metrics/__init__.py +41 -0
  36. tnfr/metrics/coherence.py +829 -0
  37. tnfr/metrics/common.py +151 -0
  38. tnfr/metrics/core.py +101 -0
  39. tnfr/metrics/diagnosis.py +234 -0
  40. tnfr/metrics/export.py +137 -0
  41. tnfr/metrics/glyph_timing.py +189 -0
  42. tnfr/metrics/reporting.py +148 -0
  43. tnfr/metrics/sense_index.py +120 -0
  44. tnfr/metrics/trig.py +181 -0
  45. tnfr/metrics/trig_cache.py +109 -0
  46. tnfr/node.py +214 -159
  47. tnfr/observers.py +126 -136
  48. tnfr/ontosim.py +134 -134
  49. tnfr/operators/__init__.py +420 -0
  50. tnfr/operators/jitter.py +203 -0
  51. tnfr/operators/remesh.py +485 -0
  52. tnfr/presets.py +46 -14
  53. tnfr/rng.py +254 -0
  54. tnfr/selector.py +210 -0
  55. tnfr/sense.py +284 -131
  56. tnfr/structural.py +207 -79
  57. tnfr/tokens.py +60 -0
  58. tnfr/trace.py +329 -94
  59. tnfr/types.py +43 -17
  60. tnfr/validators.py +70 -24
  61. tnfr/value_utils.py +59 -0
  62. tnfr-4.5.2.dist-info/METADATA +379 -0
  63. tnfr-4.5.2.dist-info/RECORD +67 -0
  64. tnfr/cli.py +0 -322
  65. tnfr/constants.py +0 -277
  66. tnfr/dynamics.py +0 -814
  67. tnfr/helpers.py +0 -264
  68. tnfr/main.py +0 -47
  69. tnfr/metrics.py +0 -597
  70. tnfr/operators.py +0 -525
  71. tnfr/program.py +0 -176
  72. tnfr/scenarios.py +0 -34
  73. tnfr-4.5.1.dist-info/METADATA +0 -221
  74. tnfr-4.5.1.dist-info/RECORD +0 -28
  75. {tnfr-4.5.1.dist-info → tnfr-4.5.2.dist-info}/WHEEL +0 -0
  76. {tnfr-4.5.1.dist-info → tnfr-4.5.2.dist-info}/entry_points.txt +0 -0
  77. {tnfr-4.5.1.dist-info → tnfr-4.5.2.dist-info}/licenses/LICENSE.md +0 -0
  78. {tnfr-4.5.1.dist-info → tnfr-4.5.2.dist-info}/top_level.txt +0 -0
tnfr/flatten.py ADDED
@@ -0,0 +1,283 @@
1
+ """Flattening utilities to compile TNFR token sequences."""
2
+
3
+ from __future__ import annotations
4
+
5
+ from collections.abc import Collection, Iterable, Mapping, Sequence
6
+ from dataclasses import dataclass
7
+ from itertools import chain
8
+ from typing import Any, Callable
9
+
10
+ from .collections_utils import (
11
+ MAX_MATERIALIZE_DEFAULT,
12
+ ensure_collection,
13
+ flatten_structure,
14
+ STRING_TYPES,
15
+ normalize_materialize_limit,
16
+ )
17
+ from .constants_glyphs import GLYPHS_CANONICAL_SET
18
+ from .tokens import THOL, TARGET, WAIT, OpTag, THOL_SENTINEL, Token
19
+ from .types import Glyph
20
+
21
# Public API of this module; underscore-prefixed helpers stay private.
__all__ = [
    "THOLEvaluator",
    "parse_program_tokens",
]
25
+
26
+
27
@dataclass
class TholFrame:
    """State for one level of a nested THOL expansion.

    Each frame tracks a token sequence, the cursor into it, how many
    repetitions are still pending and the optional glyph emitted when a
    pass over the body completes.
    """

    # Materialized token body of the THOL block.
    seq: Sequence[Token]
    # Position of the next token to emit from ``seq``.
    index: int
    # Repetitions left, including the pass currently in progress.
    remaining: int
    # Glyph returned after each completed pass, or ``None`` when absent.
    closing: Glyph | None
35
+
36
+
37
def _iter_source(
    seq: Iterable[Token] | Sequence[Token] | Any,
    *,
    max_materialize: int | None,
) -> Iterable[Any]:
    """Return an iterable over ``seq`` honouring ``max_materialize``.

    Strings are wrapped as a single item, already-materialized collections
    pass through untouched, and arbitrary iterators are probed so that at
    most ``max_materialize`` items are buffered eagerly.

    Raises
    ------
    TypeError
        If ``seq`` is not iterable at all.
    """

    # Strings are Collections too, so handle them before the generic case.
    if isinstance(seq, STRING_TYPES):
        return (seq,)
    if isinstance(seq, Collection):
        return seq

    if not isinstance(seq, Iterable):
        raise TypeError(f"{seq!r} is not iterable")

    limit = normalize_materialize_limit(max_materialize)
    if limit is None:
        # Unbounded: hand the lazy iterable back untouched.
        return seq
    if limit == 0:
        return ()

    it = iter(seq)

    def _probe() -> Iterable[Any]:
        # Yield up to ``limit + 1`` items so ``ensure_collection`` can
        # detect (and report) a source exceeding the limit.
        for count, element in enumerate(it):
            yield element
            if count >= limit:
                break

    buffered = ensure_collection(
        _probe(),
        max_materialize=limit,
    )

    if not buffered:
        return ()

    # Replay the buffered prefix, then continue with the rest of ``it``.
    return chain(buffered, it)
76
+
77
+
78
def _push_thol_frame(
    frames: list[TholFrame],
    item: THOL,
    *,
    max_materialize: int | None,
) -> None:
    """Validate ``item`` and stack a fresh :class:`TholFrame` for it.

    Raises
    ------
    ValueError
        If ``item.repeat`` is below one or ``force_close`` is neither
        ``None`` nor a :class:`Glyph`.
    """

    count = int(item.repeat)
    if count < 1:
        raise ValueError("repeat must be ≥1")

    fc = item.force_close
    if fc is not None and not isinstance(fc, Glyph):
        raise ValueError("force_close must be a Glyph")
    # Only SHA/NUL are honoured as closing glyphs; anything else is dropped.
    closing = fc if fc in (Glyph.SHA, Glyph.NUL) else None

    body = ensure_collection(
        item.body,
        max_materialize=max_materialize,
        error_msg=f"THOL body exceeds max_materialize={max_materialize}",
    )
    frames.append(
        TholFrame(
            seq=body,
            index=0,
            remaining=count,
            closing=closing,
        )
    )
110
+
111
+
112
class THOLEvaluator:
    """Iterator that lazily expands a :class:`THOL` block.

    The first item produced is always :data:`THOL_SENTINEL`, marking the
    block opening; nested THOL tokens push extra frames and emit the
    sentinel again before their own body tokens.
    """

    def __init__(
        self,
        item: THOL,
        *,
        max_materialize: int | None = MAX_MATERIALIZE_DEFAULT,
    ) -> None:
        self._frames: list[TholFrame] = []
        _push_thol_frame(self._frames, item, max_materialize=max_materialize)
        self._max_materialize = max_materialize
        self._started = False

    def __iter__(self) -> "THOLEvaluator":
        return self

    def __next__(self):
        # Emit the opening sentinel exactly once, before any body token.
        if not self._started:
            self._started = True
            return THOL_SENTINEL
        while self._frames:
            top = self._frames[-1]
            pos = top.index
            if pos < len(top.seq):
                token = top.seq[pos]
                top.index = pos + 1
                if not isinstance(token, THOL):
                    return token
                # Nested block: stack a frame and announce it.
                _push_thol_frame(
                    self._frames,
                    token,
                    max_materialize=self._max_materialize,
                )
                return THOL_SENTINEL
            # Current pass finished: rewind for another repeat, or pop.
            closing = top.closing
            top.remaining -= 1
            if top.remaining > 0:
                top.index = 0
            else:
                self._frames.pop()
            if closing is not None:
                # Closing glyph is emitted at the end of *every* pass.
                return closing
        raise StopIteration
158
+
159
+
160
def _flatten_target(
    item: TARGET,
    ops: list[tuple[OpTag, Any]],
) -> None:
    """Append ``item`` to ``ops`` as a TARGET operation."""
    ops.append((OpTag.TARGET, item))
165
+
166
+
167
def _flatten_wait(
    item: WAIT,
    ops: list[tuple[OpTag, Any]],
) -> None:
    """Append ``item`` to ``ops`` as a WAIT of at least one step."""
    requested = int(getattr(item, "steps", 1))
    # Clamp to a minimum of one step.
    ops.append((OpTag.WAIT, max(1, requested)))
173
+
174
+
175
def _flatten_glyph(
    item: Glyph | str,
    ops: list[tuple[OpTag, Any]],
) -> None:
    """Append ``item`` to ``ops`` as a GLYPH operation.

    Raises
    ------
    ValueError
        If the glyph name is not in ``GLYPHS_CANONICAL_SET``.
    """
    if isinstance(item, Glyph):
        name = item.value
    else:
        name = str(item)
    if name not in GLYPHS_CANONICAL_SET:
        raise ValueError(f"Non-canonical glyph: {name}")
    ops.append((OpTag.GLYPH, name))
183
+
184
+
185
# Exact-type dispatch table used by ``_flatten``; its iteration order also
# defines the isinstance-fallback priority there, so do not reorder entries.
_TOKEN_DISPATCH: dict[type, Callable[[Any, list[tuple[OpTag, Any]]], None]] = {
    TARGET: _flatten_target,
    WAIT: _flatten_wait,
    Glyph: _flatten_glyph,
    str: _flatten_glyph,
}
191
+
192
+
193
def _coerce_mapping_token(
    mapping: Mapping[str, Any],
    *,
    max_materialize: int | None,
) -> Token:
    """Convert a one-entry ``mapping`` into a WAIT, TARGET or THOL token.

    Parameters
    ----------
    mapping:
        Single-key dictionary such as ``{"WAIT": 3}`` or
        ``{"THOL": {"body": [...], "repeat": 2, "close": "SHA"}}``.
    max_materialize:
        Limit forwarded to :func:`parse_program_tokens` for THOL bodies.

    Raises
    ------
    ValueError
        If the mapping does not have exactly one entry, names an unknown
        token, or the THOL ``close`` name is not a :class:`Glyph` member.
    TypeError
        If the THOL specification or its ``close`` value has a wrong type.
    """

    if len(mapping) != 1:
        raise ValueError(f"Invalid token mapping: {mapping!r}")
    key, value = next(iter(mapping.items()))
    if key == "WAIT":
        return WAIT(int(value))
    if key == "TARGET":
        return TARGET(value)
    if key != "THOL":
        raise ValueError(f"Unrecognized token: {key!r}")
    if not isinstance(value, Mapping):
        raise TypeError("THOL specification must be a mapping")

    close = value.get("close")
    if isinstance(close, str):
        # Resolve the glyph by member name.
        close_enum = Glyph.__members__.get(close)
        if close_enum is None:
            # English message for consistency with the other errors in this
            # module (was previously emitted in Spanish).
            raise ValueError(f"Unknown closing glyph: {close!r}")
        close = close_enum
    elif close is not None and not isinstance(close, Glyph):
        raise TypeError("THOL close glyph must be a Glyph or string name")

    body = parse_program_tokens(value.get("body", []), max_materialize=max_materialize)
    repeat = int(value.get("repeat", 1))
    return THOL(body=body, repeat=repeat, force_close=close)
222
+
223
+
224
def parse_program_tokens(
    obj: Iterable[Any] | Sequence[Any] | Any,
    *,
    max_materialize: int | None = MAX_MATERIALIZE_DEFAULT,
) -> list[Token]:
    """Materialize ``obj`` into a list of canonical tokens.

    Accepts the same iterables handled by :func:`_flatten`, including
    dictionaries describing ``WAIT``, ``TARGET`` and ``THOL`` tokens.
    Nested iterables are flattened following :func:`flatten_structure`
    rules.

    Raises
    ------
    TypeError
        If a flattened item is not a recognized token type.
    """

    source = _iter_source(obj, max_materialize=max_materialize)

    def _expand_mapping(candidate: Any):
        # Mappings encode WAIT/TARGET/THOL specs; anything else is left to
        # flatten_structure's default handling.
        if not isinstance(candidate, Mapping):
            return None
        return (_coerce_mapping_token(candidate, max_materialize=max_materialize),)

    result: list[Token] = []
    for entry in flatten_structure(source, expand=_expand_mapping):
        if not isinstance(entry, (Glyph, WAIT, TARGET, THOL, str)):
            raise TypeError(f"Unsupported token: {entry!r}")
        result.append(entry)
    return result
250
+
251
+
252
def _flatten(
    seq: Iterable[Token] | Sequence[Token] | Any,
    *,
    max_materialize: int | None = MAX_MATERIALIZE_DEFAULT,
) -> list[tuple[OpTag, Any]]:
    """Compile ``seq`` into ``(op, payload)`` pairs with ``op`` ∈ :class:`OpTag`."""

    ops: list[tuple[OpTag, Any]] = []
    source = _iter_source(seq, max_materialize=max_materialize)

    def _expand(entry: Any):
        # THOL blocks expand lazily; mappings become single coerced tokens.
        if isinstance(entry, THOL):
            return THOLEvaluator(entry, max_materialize=max_materialize)
        if isinstance(entry, Mapping):
            return (_coerce_mapping_token(entry, max_materialize=max_materialize),)
        return None

    for entry in flatten_structure(source, expand=_expand):
        if entry is THOL_SENTINEL:
            # Sentinel marks the opening of a THOL block.
            ops.append((OpTag.THOL, Glyph.THOL.value))
            continue
        handler = _TOKEN_DISPATCH.get(type(entry))
        if handler is None:
            # Exact-type miss: fall back to isinstance checks in table
            # order so subclasses still dispatch correctly.
            for klass, fn in _TOKEN_DISPATCH.items():
                if isinstance(entry, klass):
                    handler = fn
                    break
            else:
                raise TypeError(f"Unsupported token: {entry!r}")
        handler(entry, ops)
    return ops