tnfr 4.5.2__py3-none-any.whl → 6.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (161):
  1. tnfr/__init__.py +228 -49
  2. tnfr/__init__.pyi +40 -0
  3. tnfr/_compat.py +11 -0
  4. tnfr/_version.py +7 -0
  5. tnfr/_version.pyi +7 -0
  6. tnfr/alias.py +106 -21
  7. tnfr/alias.pyi +140 -0
  8. tnfr/cache.py +666 -512
  9. tnfr/cache.pyi +232 -0
  10. tnfr/callback_utils.py +2 -9
  11. tnfr/callback_utils.pyi +105 -0
  12. tnfr/cli/__init__.py +21 -7
  13. tnfr/cli/__init__.pyi +47 -0
  14. tnfr/cli/arguments.py +42 -20
  15. tnfr/cli/arguments.pyi +33 -0
  16. tnfr/cli/execution.py +54 -20
  17. tnfr/cli/execution.pyi +80 -0
  18. tnfr/cli/utils.py +0 -2
  19. tnfr/cli/utils.pyi +8 -0
  20. tnfr/config/__init__.py +12 -0
  21. tnfr/config/__init__.pyi +8 -0
  22. tnfr/config/constants.py +104 -0
  23. tnfr/config/constants.pyi +12 -0
  24. tnfr/{config.py → config/init.py} +11 -7
  25. tnfr/config/init.pyi +8 -0
  26. tnfr/config/operator_names.py +106 -0
  27. tnfr/config/operator_names.pyi +28 -0
  28. tnfr/config/presets.py +104 -0
  29. tnfr/config/presets.pyi +7 -0
  30. tnfr/constants/__init__.py +78 -24
  31. tnfr/constants/__init__.pyi +104 -0
  32. tnfr/constants/core.py +1 -2
  33. tnfr/constants/core.pyi +17 -0
  34. tnfr/constants/init.pyi +12 -0
  35. tnfr/constants/metric.py +4 -12
  36. tnfr/constants/metric.pyi +19 -0
  37. tnfr/constants_glyphs.py +9 -91
  38. tnfr/constants_glyphs.pyi +12 -0
  39. tnfr/dynamics/__init__.py +112 -634
  40. tnfr/dynamics/__init__.pyi +83 -0
  41. tnfr/dynamics/adaptation.py +201 -0
  42. tnfr/dynamics/aliases.py +22 -0
  43. tnfr/dynamics/coordination.py +343 -0
  44. tnfr/dynamics/dnfr.py +1936 -354
  45. tnfr/dynamics/dnfr.pyi +33 -0
  46. tnfr/dynamics/integrators.py +369 -75
  47. tnfr/dynamics/integrators.pyi +35 -0
  48. tnfr/dynamics/runtime.py +521 -0
  49. tnfr/dynamics/sampling.py +8 -5
  50. tnfr/dynamics/sampling.pyi +7 -0
  51. tnfr/dynamics/selectors.py +680 -0
  52. tnfr/execution.py +56 -41
  53. tnfr/execution.pyi +65 -0
  54. tnfr/flatten.py +7 -7
  55. tnfr/flatten.pyi +28 -0
  56. tnfr/gamma.py +54 -37
  57. tnfr/gamma.pyi +40 -0
  58. tnfr/glyph_history.py +85 -38
  59. tnfr/glyph_history.pyi +53 -0
  60. tnfr/grammar.py +19 -338
  61. tnfr/grammar.pyi +13 -0
  62. tnfr/helpers/__init__.py +110 -30
  63. tnfr/helpers/__init__.pyi +66 -0
  64. tnfr/helpers/numeric.py +1 -0
  65. tnfr/helpers/numeric.pyi +12 -0
  66. tnfr/immutable.py +55 -19
  67. tnfr/immutable.pyi +37 -0
  68. tnfr/initialization.py +12 -10
  69. tnfr/initialization.pyi +73 -0
  70. tnfr/io.py +99 -34
  71. tnfr/io.pyi +11 -0
  72. tnfr/locking.pyi +7 -0
  73. tnfr/metrics/__init__.pyi +20 -0
  74. tnfr/metrics/coherence.py +934 -294
  75. tnfr/metrics/common.py +1 -3
  76. tnfr/metrics/common.pyi +15 -0
  77. tnfr/metrics/core.py +192 -34
  78. tnfr/metrics/core.pyi +13 -0
  79. tnfr/metrics/diagnosis.py +707 -101
  80. tnfr/metrics/diagnosis.pyi +89 -0
  81. tnfr/metrics/export.py +27 -13
  82. tnfr/metrics/glyph_timing.py +218 -38
  83. tnfr/metrics/reporting.py +22 -18
  84. tnfr/metrics/reporting.pyi +12 -0
  85. tnfr/metrics/sense_index.py +199 -25
  86. tnfr/metrics/sense_index.pyi +9 -0
  87. tnfr/metrics/trig.py +53 -18
  88. tnfr/metrics/trig.pyi +12 -0
  89. tnfr/metrics/trig_cache.py +3 -7
  90. tnfr/metrics/trig_cache.pyi +10 -0
  91. tnfr/node.py +148 -125
  92. tnfr/node.pyi +161 -0
  93. tnfr/observers.py +44 -30
  94. tnfr/observers.pyi +46 -0
  95. tnfr/ontosim.py +14 -13
  96. tnfr/ontosim.pyi +33 -0
  97. tnfr/operators/__init__.py +84 -52
  98. tnfr/operators/__init__.pyi +31 -0
  99. tnfr/operators/definitions.py +181 -0
  100. tnfr/operators/definitions.pyi +92 -0
  101. tnfr/operators/jitter.py +86 -23
  102. tnfr/operators/jitter.pyi +11 -0
  103. tnfr/operators/registry.py +80 -0
  104. tnfr/operators/registry.pyi +15 -0
  105. tnfr/operators/remesh.py +141 -57
  106. tnfr/presets.py +9 -54
  107. tnfr/presets.pyi +7 -0
  108. tnfr/py.typed +0 -0
  109. tnfr/rng.py +259 -73
  110. tnfr/rng.pyi +14 -0
  111. tnfr/selector.py +24 -17
  112. tnfr/selector.pyi +19 -0
  113. tnfr/sense.py +55 -43
  114. tnfr/sense.pyi +30 -0
  115. tnfr/structural.py +44 -267
  116. tnfr/structural.pyi +46 -0
  117. tnfr/telemetry/__init__.py +13 -0
  118. tnfr/telemetry/verbosity.py +37 -0
  119. tnfr/tokens.py +3 -2
  120. tnfr/tokens.pyi +41 -0
  121. tnfr/trace.py +272 -82
  122. tnfr/trace.pyi +68 -0
  123. tnfr/types.py +345 -6
  124. tnfr/types.pyi +145 -0
  125. tnfr/utils/__init__.py +158 -0
  126. tnfr/utils/__init__.pyi +133 -0
  127. tnfr/utils/cache.py +755 -0
  128. tnfr/utils/cache.pyi +156 -0
  129. tnfr/{collections_utils.py → utils/data.py} +57 -90
  130. tnfr/utils/data.pyi +73 -0
  131. tnfr/utils/graph.py +87 -0
  132. tnfr/utils/graph.pyi +10 -0
  133. tnfr/utils/init.py +746 -0
  134. tnfr/utils/init.pyi +85 -0
  135. tnfr/{json_utils.py → utils/io.py} +13 -18
  136. tnfr/utils/io.pyi +10 -0
  137. tnfr/utils/validators.py +130 -0
  138. tnfr/utils/validators.pyi +19 -0
  139. tnfr/validation/__init__.py +25 -0
  140. tnfr/validation/__init__.pyi +17 -0
  141. tnfr/validation/compatibility.py +59 -0
  142. tnfr/validation/compatibility.pyi +8 -0
  143. tnfr/validation/grammar.py +149 -0
  144. tnfr/validation/grammar.pyi +11 -0
  145. tnfr/validation/rules.py +194 -0
  146. tnfr/validation/rules.pyi +18 -0
  147. tnfr/validation/syntax.py +151 -0
  148. tnfr/validation/syntax.pyi +7 -0
  149. tnfr-6.0.0.dist-info/METADATA +135 -0
  150. tnfr-6.0.0.dist-info/RECORD +157 -0
  151. tnfr/graph_utils.py +0 -84
  152. tnfr/import_utils.py +0 -228
  153. tnfr/logging_utils.py +0 -116
  154. tnfr/validators.py +0 -84
  155. tnfr/value_utils.py +0 -59
  156. tnfr-4.5.2.dist-info/METADATA +0 -379
  157. tnfr-4.5.2.dist-info/RECORD +0 -67
  158. {tnfr-4.5.2.dist-info → tnfr-6.0.0.dist-info}/WHEEL +0 -0
  159. {tnfr-4.5.2.dist-info → tnfr-6.0.0.dist-info}/entry_points.txt +0 -0
  160. {tnfr-4.5.2.dist-info → tnfr-6.0.0.dist-info}/licenses/LICENSE.md +0 -0
  161. {tnfr-4.5.2.dist-info → tnfr-6.0.0.dist-info}/top_level.txt +0 -0
tnfr/glyph_history.py CHANGED
@@ -2,15 +2,14 @@
2
2
 
3
3
  from __future__ import annotations
4
4
 
5
- from typing import Any
5
+ from typing import Any, cast
6
6
  from collections import deque, Counter
7
7
  from itertools import islice
8
- from collections.abc import Iterable, Mapping
9
- from functools import lru_cache
8
+ from collections.abc import Iterable, Mapping, MutableMapping
10
9
 
11
- from .constants import get_param
12
- from .collections_utils import ensure_collection
13
- from .logging_utils import get_logger
10
+ from .constants import get_param, normalise_state_token
11
+ from .utils import ensure_collection, get_logger, validate_window
12
+ from .types import TNFRGraph
14
13
 
15
14
  logger = get_logger(__name__)
16
15
 
@@ -26,23 +25,12 @@ __all__ = (
26
25
  )
27
26
 
28
27
 
29
- @lru_cache(maxsize=1)
30
- def _resolve_validate_window():
31
- from .validators import validate_window
32
-
33
- return validate_window
34
-
35
-
36
- def _validate_window(window: int, *, positive: bool = False) -> int:
37
- return _resolve_validate_window()(window, positive=positive)
38
-
39
-
40
28
  def _ensure_history(
41
- nd: dict[str, Any], window: int, *, create_zero: bool = False
42
- ) -> tuple[int, deque | None]:
29
+ nd: MutableMapping[str, Any], window: int, *, create_zero: bool = False
30
+ ) -> tuple[int, deque[str] | None]:
43
31
  """Validate ``window`` and ensure ``nd['glyph_history']`` deque."""
44
32
 
45
- v_window = _validate_window(window)
33
+ v_window = validate_window(window)
46
34
  if v_window == 0 and not create_zero:
47
35
  return v_window, None
48
36
  hist = nd.setdefault("glyph_history", deque(maxlen=v_window))
@@ -58,12 +46,12 @@ def _ensure_history(
58
46
  "Discarding non-iterable glyph history value %r", hist
59
47
  )
60
48
  items = ()
61
- hist = deque(items, maxlen=v_window)
49
+ hist = deque((str(item) for item in items), maxlen=v_window)
62
50
  nd["glyph_history"] = hist
63
51
  return v_window, hist
64
52
 
65
53
 
66
- def push_glyph(nd: dict[str, Any], glyph: str, window: int) -> None:
54
+ def push_glyph(nd: MutableMapping[str, Any], glyph: str, window: int) -> None:
67
55
  """Add ``glyph`` to node history with maximum size ``window``.
68
56
 
69
57
  ``window`` validation and deque creation are handled by
@@ -74,7 +62,9 @@ def push_glyph(nd: dict[str, Any], glyph: str, window: int) -> None:
74
62
  hist.append(str(glyph))
75
63
 
76
64
 
77
- def recent_glyph(nd: dict[str, Any], glyph: str, window: int) -> bool:
65
+ def recent_glyph(
66
+ nd: MutableMapping[str, Any], glyph: str, window: int
67
+ ) -> bool:
78
68
  """Return ``True`` if ``glyph`` appeared in last ``window`` emissions.
79
69
 
80
70
  ``window`` validation and deque creation are handled by
@@ -89,7 +79,7 @@ def recent_glyph(nd: dict[str, Any], glyph: str, window: int) -> bool:
89
79
  return gl in hist
90
80
 
91
81
 
92
- class HistoryDict(dict):
82
+ class HistoryDict(dict[str, Any]):
93
83
  """Dict specialized for bounded history series and usage counts.
94
84
 
95
85
  Usage counts are tracked explicitly via :meth:`get_increment`. Accessing
@@ -108,7 +98,7 @@ class HistoryDict(dict):
108
98
 
109
99
  def __init__(
110
100
  self,
111
- data: dict[str, Any] | None = None,
101
+ data: Mapping[str, Any] | None = None,
112
102
  *,
113
103
  maxlen: int = 0,
114
104
  ) -> None:
@@ -129,7 +119,7 @@ class HistoryDict(dict):
129
119
  """Increase usage count for ``key``."""
130
120
  self._counts[key] += 1
131
121
 
132
- def _to_deque(self, val: Any) -> deque:
122
+ def _to_deque(self, val: Any) -> deque[Any]:
133
123
  """Coerce ``val`` to a deque respecting ``self._maxlen``.
134
124
 
135
125
  ``Iterable`` inputs (excluding ``str`` and ``bytes``) are expanded into
@@ -160,21 +150,21 @@ class HistoryDict(dict):
160
150
  self._increment(key)
161
151
  return val
162
152
 
163
- def __getitem__(self, key): # type: ignore[override]
153
+ def __getitem__(self, key: str) -> Any: # type: ignore[override]
164
154
  return self._resolve_value(key, None, insert=False)
165
155
 
166
- def get(self, key, default=None): # type: ignore[override]
156
+ def get(self, key: str, default: Any | None = None) -> Any: # type: ignore[override]
167
157
  try:
168
158
  return self._resolve_value(key, None, insert=False)
169
159
  except KeyError:
170
160
  return default
171
161
 
172
- def __setitem__(self, key, value): # type: ignore[override]
162
+ def __setitem__(self, key: str, value: Any) -> None: # type: ignore[override]
173
163
  super().__setitem__(key, value)
174
164
  if key not in self._counts:
175
165
  self._counts[key] = 0
176
166
 
177
- def setdefault(self, key, default=None): # type: ignore[override]
167
+ def setdefault(self, key: str, default: Any | None = None) -> Any: # type: ignore[override]
178
168
  insert = key not in self
179
169
  val = self._resolve_value(key, default, insert=insert)
180
170
  if insert:
@@ -198,7 +188,7 @@ class HistoryDict(dict):
198
188
  break
199
189
 
200
190
 
201
- def ensure_history(G) -> dict[str, Any]:
191
+ def ensure_history(G: TNFRGraph) -> HistoryDict | dict[str, Any]:
202
192
  """Ensure ``G.graph['history']`` exists and return it.
203
193
 
204
194
  ``HISTORY_MAXLEN`` must be non-negative; otherwise a
@@ -220,6 +210,8 @@ def ensure_history(G) -> dict[str, Any]:
220
210
  replaced = True
221
211
  if replaced:
222
212
  G.graph.pop(sentinel_key, None)
213
+ if isinstance(hist, MutableMapping):
214
+ _normalise_state_streams(hist)
223
215
  return hist
224
216
  if (
225
217
  not isinstance(hist, HistoryDict)
@@ -233,31 +225,52 @@ def ensure_history(G) -> dict[str, Any]:
233
225
  hist.pop_least_used_batch(excess)
234
226
  if replaced:
235
227
  G.graph.pop(sentinel_key, None)
228
+ _normalise_state_streams(cast(MutableMapping[str, Any], hist))
236
229
  return hist
237
230
 
238
231
 
239
- def current_step_idx(G) -> int:
232
+ def current_step_idx(G: TNFRGraph | Mapping[str, Any]) -> int:
240
233
  """Return the current step index from ``G`` history."""
241
234
 
242
235
  graph = getattr(G, "graph", G)
243
236
  return len(graph.get("history", {}).get("C_steps", []))
244
237
 
245
-
246
238
 
247
- def append_metric(hist: dict[str, Any], key: str, value: Any) -> None:
239
+ def append_metric(
240
+ hist: MutableMapping[str, list[Any]], key: str, value: Any
241
+ ) -> None:
248
242
  """Append ``value`` to ``hist[key]`` list, creating it if missing."""
243
+ if key == "phase_state" and isinstance(value, str):
244
+ value = normalise_state_token(value)
245
+ elif key == "nodal_diag" and isinstance(value, Mapping):
246
+ snapshot: dict[Any, Any] = {}
247
+ for node, payload in value.items():
248
+ if isinstance(payload, Mapping):
249
+ state_value = payload.get("state")
250
+ if isinstance(payload, MutableMapping):
251
+ updated = payload
252
+ else:
253
+ updated = dict(payload)
254
+ if isinstance(state_value, str):
255
+ updated["state"] = normalise_state_token(state_value)
256
+ snapshot[node] = updated
257
+ else:
258
+ snapshot[node] = payload
259
+ hist.setdefault(key, []).append(snapshot)
260
+ return
261
+
249
262
  hist.setdefault(key, []).append(value)
250
263
 
251
264
 
252
- def last_glyph(nd: dict[str, Any]) -> str | None:
265
+ def last_glyph(nd: Mapping[str, Any]) -> str | None:
253
266
  """Return the most recent glyph for node or ``None``."""
254
267
  hist = nd.get("glyph_history")
255
268
  return hist[-1] if hist else None
256
269
 
257
270
 
258
271
  def count_glyphs(
259
- G, window: int | None = None, *, last_only: bool = False
260
- ) -> Counter:
272
+ G: TNFRGraph, window: int | None = None, *, last_only: bool = False
273
+ ) -> Counter[str]:
261
274
  """Count recent glyphs in the network.
262
275
 
263
276
  If ``window`` is ``None``, the full history for each node is used. A
@@ -266,7 +279,7 @@ def count_glyphs(
266
279
  """
267
280
 
268
281
  if window is not None:
269
- window = _validate_window(window)
282
+ window = validate_window(window)
270
283
  if window == 0:
271
284
  return Counter()
272
285
 
@@ -288,3 +301,37 @@ def count_glyphs(
288
301
  counts.update(seq)
289
302
 
290
303
  return counts
304
+
305
+
306
+ def _normalise_state_streams(hist: MutableMapping[str, Any]) -> None:
307
+ """Normalise legacy state tokens stored in telemetry history."""
308
+
309
+ phase_state = hist.get("phase_state")
310
+ if isinstance(phase_state, deque):
311
+ canonical = [normalise_state_token(str(item)) for item in phase_state]
312
+ if canonical != list(phase_state):
313
+ phase_state.clear()
314
+ phase_state.extend(canonical)
315
+ elif isinstance(phase_state, list):
316
+ canonical = [normalise_state_token(str(item)) for item in phase_state]
317
+ if canonical != phase_state:
318
+ hist["phase_state"] = canonical
319
+
320
+ diag_history = hist.get("nodal_diag")
321
+ if isinstance(diag_history, list):
322
+ for snapshot in diag_history:
323
+ if not isinstance(snapshot, Mapping):
324
+ continue
325
+ for node, payload in snapshot.items():
326
+ if not isinstance(payload, Mapping):
327
+ continue
328
+ state_value = payload.get("state")
329
+ if not isinstance(state_value, str):
330
+ continue
331
+ canonical = normalise_state_token(state_value)
332
+ if canonical == state_value:
333
+ continue
334
+ if isinstance(payload, MutableMapping):
335
+ payload["state"] = canonical
336
+ else:
337
+ snapshot[node] = {**payload, "state": canonical}
tnfr/glyph_history.pyi ADDED
@@ -0,0 +1,53 @@
1
+ from __future__ import annotations
2
+
3
+ from collections import Counter
4
+ from collections.abc import Mapping, MutableMapping
5
+ from typing import Any
6
+
7
+ from .types import TNFRGraph
8
+
9
+ __all__: tuple[str, ...]
10
+
11
+
12
+ class HistoryDict(dict[str, Any]):
13
+ _maxlen: int
14
+ _counts: Counter[str]
15
+
16
+ def __init__(
17
+ self, data: Mapping[str, Any] | None = ..., *, maxlen: int = ...
18
+ ) -> None: ...
19
+
20
+ def get_increment(self, key: str, default: Any = ...) -> Any: ...
21
+ def __getitem__(self, key: str) -> Any: ...
22
+ def get(self, key: str, default: Any | None = ...) -> Any: ...
23
+ def __setitem__(self, key: str, value: Any) -> None: ...
24
+ def setdefault(self, key: str, default: Any | None = ...) -> Any: ...
25
+ def pop_least_used(self) -> Any: ...
26
+ def pop_least_used_batch(self, k: int) -> None: ...
27
+
28
+
29
+ def push_glyph(nd: MutableMapping[str, Any], glyph: str, window: int) -> None: ...
30
+
31
+
32
+ def recent_glyph(
33
+ nd: MutableMapping[str, Any], glyph: str, window: int
34
+ ) -> bool: ...
35
+
36
+
37
+ def ensure_history(G: TNFRGraph) -> HistoryDict | dict[str, Any]: ...
38
+
39
+
40
+ def current_step_idx(G: TNFRGraph | Mapping[str, Any]) -> int: ...
41
+
42
+
43
+ def append_metric(
44
+ hist: MutableMapping[str, list[Any]], key: str, value: Any
45
+ ) -> None: ...
46
+
47
+
48
+ def last_glyph(nd: Mapping[str, Any]) -> str | None: ...
49
+
50
+
51
+ def count_glyphs(
52
+ G: TNFRGraph, window: int | None = ..., *, last_only: bool = ...
53
+ ) -> Counter[str]: ...
tnfr/grammar.py CHANGED
@@ -1,344 +1,25 @@
1
- """Grammar rules."""
2
-
3
- from __future__ import annotations
4
- from dataclasses import dataclass
5
- from typing import Any, Iterable, Optional, Callable
6
-
7
- from .constants import DEFAULTS, get_aliases, get_param
8
- from .alias import get_attr
9
- from .helpers.numeric import clamp01
10
- from .glyph_history import recent_glyph
11
- from .types import Glyph
12
- from .operators import apply_glyph # avoid repeated import inside functions
13
- from .metrics.common import normalize_dnfr
14
-
15
- ALIAS_SI = get_aliases("SI")
16
- ALIAS_D2EPI = get_aliases("D2EPI")
17
-
18
-
19
- @dataclass
20
- class GrammarContext:
21
- """Shared context for grammar helpers.
22
-
23
- Collects graph-level settings to reduce positional parameters across
24
- helper functions.
25
- """
26
-
27
- G: Any
28
- cfg_soft: dict[str, Any]
29
- cfg_canon: dict[str, Any]
30
- norms: dict[str, Any]
31
-
32
- @classmethod
33
- def from_graph(cls, G: Any) -> "GrammarContext":
34
- """Create a :class:`GrammarContext` for ``G``."""
35
- return cls(
36
- G=G,
37
- cfg_soft=G.graph.get("GRAMMAR", DEFAULTS.get("GRAMMAR", {})),
38
- cfg_canon=G.graph.get(
39
- "GRAMMAR_CANON", DEFAULTS.get("GRAMMAR_CANON", {})
40
- ),
41
- norms=G.graph.get("_sel_norms") or {},
42
- )
43
-
1
+ """Backwards compatibility layer for grammar helpers.
2
+
3
+ The canonical implementations now live in :mod:`tnfr.validation`. This
4
+ module only re-exports them to avoid breaking external callers until the
5
+ new import paths are fully adopted.
6
+ """
7
+
8
+ from .validation.compatibility import CANON_COMPAT, CANON_FALLBACK
9
+ from .validation.grammar import (
10
+ GrammarContext,
11
+ apply_glyph_with_grammar,
12
+ enforce_canonical_grammar,
13
+ on_applied_glyph,
14
+ _gram_state,
15
+ )
44
16
 
45
- __all__ = (
17
+ __all__ = [
18
+ "GrammarContext",
46
19
  "CANON_COMPAT",
47
20
  "CANON_FALLBACK",
48
21
  "enforce_canonical_grammar",
49
22
  "on_applied_glyph",
50
23
  "apply_glyph_with_grammar",
51
- "GrammarContext",
52
- )
53
-
54
- # -------------------------
55
- # Per-node grammar state
56
- # -------------------------
57
-
58
-
59
- def _gram_state(nd: dict[str, Any]) -> dict[str, Any]:
60
- """Create or return the node grammar state.
61
-
62
- Fields:
63
- - thol_open (bool)
64
- - thol_len (int)
65
- """
66
- return nd.setdefault("_GRAM", {"thol_open": False, "thol_len": 0})
67
-
68
-
69
- # -------------------------
70
- # Canonical compatibilities (allowed next glyphs)
71
- # -------------------------
72
- CANON_COMPAT: dict[Glyph, set[Glyph]] = {
73
- # Inicio / apertura
74
- Glyph.AL: {Glyph.EN, Glyph.RA, Glyph.NAV, Glyph.VAL, Glyph.UM},
75
- Glyph.EN: {Glyph.IL, Glyph.UM, Glyph.RA, Glyph.NAV},
76
- # Estabilización / difusión / acople
77
- Glyph.IL: {Glyph.RA, Glyph.VAL, Glyph.UM, Glyph.SHA},
78
- Glyph.UM: {Glyph.RA, Glyph.IL, Glyph.VAL, Glyph.NAV},
79
- Glyph.RA: {Glyph.IL, Glyph.VAL, Glyph.UM, Glyph.NAV},
80
- Glyph.VAL: {Glyph.UM, Glyph.RA, Glyph.IL, Glyph.NAV},
81
- # Disonancia → transición → mutación
82
- Glyph.OZ: {Glyph.ZHIR, Glyph.NAV},
83
- Glyph.ZHIR: {Glyph.IL, Glyph.NAV},
84
- Glyph.NAV: {Glyph.OZ, Glyph.ZHIR, Glyph.RA, Glyph.IL, Glyph.UM},
85
- # Cierres / latencias
86
- Glyph.SHA: {Glyph.AL, Glyph.EN},
87
- Glyph.NUL: {Glyph.AL, Glyph.IL},
88
- # Bloques autoorganizativos
89
- Glyph.THOL: {
90
- Glyph.OZ,
91
- Glyph.ZHIR,
92
- Glyph.NAV,
93
- Glyph.RA,
94
- Glyph.IL,
95
- Glyph.UM,
96
- Glyph.SHA,
97
- Glyph.NUL,
98
- },
99
- }
100
-
101
- # Canonical fallbacks when a transition is not allowed
102
- CANON_FALLBACK: dict[Glyph, Glyph] = {
103
- Glyph.AL: Glyph.EN,
104
- Glyph.EN: Glyph.IL,
105
- Glyph.IL: Glyph.RA,
106
- Glyph.NAV: Glyph.RA,
107
- Glyph.NUL: Glyph.AL,
108
- Glyph.OZ: Glyph.ZHIR,
109
- Glyph.RA: Glyph.IL,
110
- Glyph.SHA: Glyph.AL,
111
- Glyph.THOL: Glyph.NAV,
112
- Glyph.UM: Glyph.RA,
113
- Glyph.VAL: Glyph.RA,
114
- Glyph.ZHIR: Glyph.IL,
115
- }
116
-
117
-
118
- def _coerce_glyph(val: Any) -> Glyph | Any:
119
- """Return ``val`` as ``Glyph`` when possible."""
120
- try:
121
- return Glyph(val)
122
- except (ValueError, TypeError):
123
- return val
124
-
125
-
126
- def _glyph_fallback(cand_key: str, fallbacks: dict[str, Any]) -> Glyph | str:
127
- """Determine fallback glyph for ``cand_key`` and return converted value."""
128
- glyph_key = _coerce_glyph(cand_key)
129
- canon_fb = (
130
- CANON_FALLBACK.get(glyph_key, cand_key)
131
- if isinstance(glyph_key, Glyph)
132
- else cand_key
133
- )
134
- fb = fallbacks.get(cand_key, canon_fb)
135
- return _coerce_glyph(fb)
136
-
137
-
138
- # -------------------------
139
- # THOL closures and ZHIR preconditions
140
- # -------------------------
141
-
142
-
143
- def get_norm(ctx: GrammarContext, key: str) -> float:
144
- """Retrieve a global normalisation value from ``ctx.norms``."""
145
- return float(ctx.norms.get(key, 1.0)) or 1.0
146
-
147
-
148
- def _norm_attr(ctx: GrammarContext, nd, attr_alias: str, norm_key: str) -> float:
149
- """Normalise ``attr_alias`` using the global maximum ``norm_key``."""
150
-
151
- max_val = get_norm(ctx, norm_key)
152
- return clamp01(abs(get_attr(nd, attr_alias, 0.0)) / max_val)
153
-
154
-
155
- def _si(nd) -> float:
156
- return clamp01(get_attr(nd, ALIAS_SI, 0.5))
157
-
158
-
159
- def _accel_norm(ctx: GrammarContext, nd) -> float:
160
- """Normalise acceleration using the global maximum."""
161
- return _norm_attr(ctx, nd, ALIAS_D2EPI, "accel_max")
162
-
163
-
164
- def _check_repeats(ctx: GrammarContext, n, cand: Glyph | str) -> Glyph | str:
165
- """Avoid recent repetitions according to ``ctx.cfg_soft``."""
166
- nd = ctx.G.nodes[n]
167
- cfg = ctx.cfg_soft
168
- gwin = int(cfg.get("window", 0))
169
- avoid = set(cfg.get("avoid_repeats", []))
170
- fallbacks = cfg.get("fallbacks", {})
171
- cand_key = cand.value if isinstance(cand, Glyph) else str(cand)
172
- if gwin > 0 and cand_key in avoid and recent_glyph(nd, cand_key, gwin):
173
- return _glyph_fallback(cand_key, fallbacks)
174
- return cand
175
-
176
-
177
- def _maybe_force(
178
- ctx: GrammarContext,
179
- n,
180
- cand: Glyph | str,
181
- original: Glyph | str,
182
- accessor: Callable[[GrammarContext, dict[str, Any]], float],
183
- key: str,
184
- ) -> Glyph | str:
185
- """Restore ``original`` if ``accessor`` exceeds ``key`` threshold."""
186
- if cand == original:
187
- return cand
188
- force_th = float(ctx.cfg_soft.get(key, 0.60))
189
- if accessor(ctx, ctx.G.nodes[n]) >= force_th:
190
- return original
191
- return cand
192
-
193
-
194
- def _check_oz_to_zhir(ctx: GrammarContext, n, cand: Glyph | str) -> Glyph | str:
195
- nd = ctx.G.nodes[n]
196
- cand_glyph = _coerce_glyph(cand)
197
- if cand_glyph == Glyph.ZHIR:
198
- cfg = ctx.cfg_canon
199
- win = int(cfg.get("zhir_requires_oz_window", 3))
200
- dn_min = float(cfg.get("zhir_dnfr_min", 0.05))
201
- dnfr_max = get_norm(ctx, "dnfr_max")
202
- if (
203
- not recent_glyph(nd, Glyph.OZ, win)
204
- and normalize_dnfr(nd, dnfr_max) < dn_min
205
- ):
206
- return Glyph.OZ
207
- return cand
208
-
209
-
210
- def _check_thol_closure(
211
- ctx: GrammarContext, n, cand: Glyph | str, st: dict[str, Any]
212
- ) -> Glyph | str:
213
- nd = ctx.G.nodes[n]
214
- if st.get("thol_open", False):
215
- st["thol_len"] = int(st.get("thol_len", 0)) + 1
216
- cfg = ctx.cfg_canon
217
- minlen = int(cfg.get("thol_min_len", 2))
218
- maxlen = int(cfg.get("thol_max_len", 6))
219
- close_dn = float(cfg.get("thol_close_dnfr", 0.15))
220
- dnfr_max = get_norm(ctx, "dnfr_max")
221
- if st["thol_len"] >= maxlen or (
222
- st["thol_len"] >= minlen
223
- and normalize_dnfr(nd, dnfr_max) <= close_dn
224
- ):
225
- return (
226
- Glyph.NUL
227
- if _si(nd) >= float(cfg.get("si_high", 0.66))
228
- else Glyph.SHA
229
- )
230
- return cand
231
-
232
-
233
- def _check_compatibility(ctx: GrammarContext, n, cand: Glyph | str) -> Glyph | str:
234
- nd = ctx.G.nodes[n]
235
- hist = nd.get("glyph_history")
236
- prev = hist[-1] if hist else None
237
- prev_glyph = _coerce_glyph(prev)
238
- cand_glyph = _coerce_glyph(cand)
239
- if isinstance(prev_glyph, Glyph):
240
- allowed = CANON_COMPAT.get(prev_glyph)
241
- if allowed is None:
242
- return cand
243
- if isinstance(cand_glyph, Glyph):
244
- if cand_glyph not in allowed:
245
- return CANON_FALLBACK.get(prev_glyph, cand_glyph)
246
- else:
247
- return CANON_FALLBACK.get(prev_glyph, cand)
248
- return cand
249
-
250
-
251
- # -------------------------
252
- # Core: enforce grammar on a candidate
253
- # -------------------------
254
-
255
-
256
- def enforce_canonical_grammar(
257
- G, n, cand: Glyph | str, ctx: Optional[GrammarContext] = None
258
- ) -> Glyph | str:
259
- """Validate and adjust a candidate glyph according to canonical grammar.
260
-
261
- Key rules:
262
- - Repeat window with forces based on |ΔNFR| and acceleration.
263
- - Transition compatibilities (TNFR path).
264
- - OZ→ZHIR: mutation requires recent dissonance or high |ΔNFR|.
265
- - THOL[...]: forces closure with SHA or NUL when the field stabilises
266
- or block length is reached; maintains per-node state.
267
-
268
- Returns the effective glyph to apply.
269
- """
270
- if ctx is None:
271
- ctx = GrammarContext.from_graph(G)
272
-
273
- nd = ctx.G.nodes[n]
274
- st = _gram_state(nd)
275
-
276
- raw_cand = cand
277
- cand = _coerce_glyph(cand)
278
- input_was_str = isinstance(raw_cand, str)
279
-
280
- # 0) If glyphs outside the alphabet arrive, leave untouched
281
- if not isinstance(cand, Glyph) or cand not in CANON_COMPAT:
282
- return raw_cand if input_was_str else cand
283
-
284
- original = cand
285
- cand = _check_repeats(ctx, n, cand)
286
-
287
- dnfr_accessor = lambda ctx, nd: normalize_dnfr(nd, get_norm(ctx, "dnfr_max"))
288
- cand = _maybe_force(ctx, n, cand, original, dnfr_accessor, "force_dnfr")
289
- cand = _maybe_force(ctx, n, cand, original, _accel_norm, "force_accel")
290
- cand = _check_oz_to_zhir(ctx, n, cand)
291
- cand = _check_thol_closure(ctx, n, cand, st)
292
- cand = _check_compatibility(ctx, n, cand)
293
-
294
- coerced_final = _coerce_glyph(cand)
295
- if input_was_str:
296
- if isinstance(coerced_final, Glyph):
297
- return coerced_final.value
298
- return str(cand)
299
- return coerced_final if isinstance(coerced_final, Glyph) else cand
300
-
301
-
302
- # -------------------------
303
- # Post-selection: update grammar state
304
- # -------------------------
305
-
306
-
307
- def on_applied_glyph(G, n, applied: str) -> None:
308
- nd = G.nodes[n]
309
- st = _gram_state(nd)
310
- if applied == Glyph.THOL:
311
- st["thol_open"] = True
312
- st["thol_len"] = 0
313
- elif applied in (Glyph.SHA, Glyph.NUL):
314
- st["thol_open"] = False
315
- st["thol_len"] = 0
316
-
317
-
318
- # -------------------------
319
- # Direct application with canonical grammar
320
- # -------------------------
321
-
322
-
323
- def apply_glyph_with_grammar(
324
- G,
325
- nodes: Optional[Iterable[Any]],
326
- glyph: Glyph | str,
327
- window: Optional[int] = None,
328
- ) -> None:
329
- """Apply ``glyph`` to ``nodes`` enforcing the canonical grammar.
330
-
331
- ``nodes`` may be a ``NodeView`` or any iterable. The iterable is consumed
332
- directly to avoid unnecessary materialisation; callers must materialise if
333
- they need indexing.
334
- """
335
- if window is None:
336
- window = get_param(G, "GLYPH_HYSTERESIS_WINDOW")
337
-
338
- g_str = glyph.value if isinstance(glyph, Glyph) else str(glyph)
339
- iter_nodes = G.nodes() if nodes is None else nodes
340
- ctx = GrammarContext.from_graph(G)
341
- for n in iter_nodes:
342
- g_eff = enforce_canonical_grammar(G, n, g_str, ctx)
343
- apply_glyph(G, n, g_eff, window=window)
344
- on_applied_glyph(G, n, g_eff)
24
+ "_gram_state",
25
+ ]
tnfr/grammar.pyi ADDED
@@ -0,0 +1,13 @@
1
+ from typing import Any
2
+
3
+ __all__: Any
4
+
5
+ def __getattr__(name: str) -> Any: ...
6
+
7
+ CANON_COMPAT: Any
8
+ CANON_FALLBACK: Any
9
+ GrammarContext: Any
10
+ _gram_state: Any
11
+ apply_glyph_with_grammar: Any
12
+ enforce_canonical_grammar: Any
13
+ on_applied_glyph: Any