tnfr 4.5.0__py3-none-any.whl → 4.5.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of tnfr might be problematic. Click here for more details.
- tnfr/__init__.py +91 -89
- tnfr/alias.py +546 -0
- tnfr/cache.py +578 -0
- tnfr/callback_utils.py +388 -0
- tnfr/cli/__init__.py +75 -0
- tnfr/cli/arguments.py +177 -0
- tnfr/cli/execution.py +288 -0
- tnfr/cli/utils.py +36 -0
- tnfr/collections_utils.py +300 -0
- tnfr/config.py +19 -28
- tnfr/constants/__init__.py +174 -0
- tnfr/constants/core.py +159 -0
- tnfr/constants/init.py +31 -0
- tnfr/constants/metric.py +110 -0
- tnfr/constants_glyphs.py +98 -0
- tnfr/dynamics/__init__.py +658 -0
- tnfr/dynamics/dnfr.py +733 -0
- tnfr/dynamics/integrators.py +267 -0
- tnfr/dynamics/sampling.py +31 -0
- tnfr/execution.py +201 -0
- tnfr/flatten.py +283 -0
- tnfr/gamma.py +302 -88
- tnfr/glyph_history.py +290 -0
- tnfr/grammar.py +285 -96
- tnfr/graph_utils.py +84 -0
- tnfr/helpers/__init__.py +71 -0
- tnfr/helpers/numeric.py +87 -0
- tnfr/immutable.py +178 -0
- tnfr/import_utils.py +228 -0
- tnfr/initialization.py +197 -0
- tnfr/io.py +246 -0
- tnfr/json_utils.py +162 -0
- tnfr/locking.py +37 -0
- tnfr/logging_utils.py +116 -0
- tnfr/metrics/__init__.py +41 -0
- tnfr/metrics/coherence.py +829 -0
- tnfr/metrics/common.py +151 -0
- tnfr/metrics/core.py +101 -0
- tnfr/metrics/diagnosis.py +234 -0
- tnfr/metrics/export.py +137 -0
- tnfr/metrics/glyph_timing.py +189 -0
- tnfr/metrics/reporting.py +148 -0
- tnfr/metrics/sense_index.py +120 -0
- tnfr/metrics/trig.py +181 -0
- tnfr/metrics/trig_cache.py +109 -0
- tnfr/node.py +214 -159
- tnfr/observers.py +126 -128
- tnfr/ontosim.py +134 -134
- tnfr/operators/__init__.py +420 -0
- tnfr/operators/jitter.py +203 -0
- tnfr/operators/remesh.py +485 -0
- tnfr/presets.py +46 -14
- tnfr/rng.py +254 -0
- tnfr/selector.py +210 -0
- tnfr/sense.py +284 -131
- tnfr/structural.py +207 -79
- tnfr/tokens.py +60 -0
- tnfr/trace.py +329 -94
- tnfr/types.py +43 -17
- tnfr/validators.py +70 -24
- tnfr/value_utils.py +59 -0
- tnfr-4.5.2.dist-info/METADATA +379 -0
- tnfr-4.5.2.dist-info/RECORD +67 -0
- tnfr/cli.py +0 -322
- tnfr/constants.py +0 -277
- tnfr/dynamics.py +0 -814
- tnfr/helpers.py +0 -264
- tnfr/main.py +0 -47
- tnfr/metrics.py +0 -597
- tnfr/operators.py +0 -525
- tnfr/program.py +0 -176
- tnfr/scenarios.py +0 -34
- tnfr-4.5.0.dist-info/METADATA +0 -109
- tnfr-4.5.0.dist-info/RECORD +0 -28
- {tnfr-4.5.0.dist-info → tnfr-4.5.2.dist-info}/WHEEL +0 -0
- {tnfr-4.5.0.dist-info → tnfr-4.5.2.dist-info}/entry_points.txt +0 -0
- {tnfr-4.5.0.dist-info → tnfr-4.5.2.dist-info}/licenses/LICENSE.md +0 -0
- {tnfr-4.5.0.dist-info → tnfr-4.5.2.dist-info}/top_level.txt +0 -0
tnfr/grammar.py
CHANGED
|
@@ -1,155 +1,344 @@
|
|
|
1
|
+
"""Grammar rules."""
|
|
2
|
+
|
|
1
3
|
from __future__ import annotations
|
|
2
|
-
from
|
|
4
|
+
from dataclasses import dataclass
|
|
5
|
+
from typing import Any, Iterable, Optional, Callable
|
|
3
6
|
|
|
4
|
-
from .constants import
|
|
5
|
-
|
|
6
|
-
|
|
7
|
-
|
|
8
|
-
from .
|
|
9
|
-
from
|
|
7
|
+
from .constants import DEFAULTS, get_aliases, get_param
|
|
8
|
+
from .alias import get_attr
|
|
9
|
+
from .helpers.numeric import clamp01
|
|
10
|
+
from .glyph_history import recent_glyph
|
|
11
|
+
from .types import Glyph
|
|
12
|
+
from .operators import apply_glyph # avoid repeated import inside functions
|
|
13
|
+
from .metrics.common import normalize_dnfr
|
|
14
|
+
|
|
15
|
+
# Attribute-alias keys resolved once at import time via get_aliases()
# (exact container type depends on the constants module — see get_aliases).
ALIAS_SI = get_aliases("SI")  # sense-index attribute aliases, read by _si()
ALIAS_D2EPI = get_aliases("D2EPI")  # D2EPI attribute aliases, read by _accel_norm()
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
@dataclass
class GrammarContext:
    """Shared context for grammar helpers.

    Collects graph-level settings to reduce positional parameters across
    helper functions.
    """

    # Graph-like object; helpers use ``G.graph`` (a mapping) and ``G.nodes``.
    G: Any
    # Soft grammar settings (repeat window, avoid_repeats, fallbacks, force_*)
    # taken from ``G.graph["GRAMMAR"]``.
    cfg_soft: dict[str, Any]
    # Canonical grammar settings (THOL/ZHIR thresholds) taken from
    # ``G.graph["GRAMMAR_CANON"]``.
    cfg_canon: dict[str, Any]
    # Selector normalisation maxima (e.g. "dnfr_max", "accel_max") taken from
    # ``G.graph["_sel_norms"]``; empty dict when absent.
    norms: dict[str, Any]

    @classmethod
    def from_graph(cls, G: Any) -> "GrammarContext":
        """Create a :class:`GrammarContext` for ``G``.

        Missing graph entries fall back to ``DEFAULTS`` (for the two config
        mappings) or an empty dict (for ``norms``).
        """
        return cls(
            G=G,
            cfg_soft=G.graph.get("GRAMMAR", DEFAULTS.get("GRAMMAR", {})),
            cfg_canon=G.graph.get(
                "GRAMMAR_CANON", DEFAULTS.get("GRAMMAR_CANON", {})
            ),
            norms=G.graph.get("_sel_norms") or {},
        )
|
|
43
|
+
|
|
44
|
+
|
|
45
|
+
__all__ = (
|
|
46
|
+
"CANON_COMPAT",
|
|
47
|
+
"CANON_FALLBACK",
|
|
48
|
+
"enforce_canonical_grammar",
|
|
49
|
+
"on_applied_glyph",
|
|
50
|
+
"apply_glyph_with_grammar",
|
|
51
|
+
"GrammarContext",
|
|
52
|
+
)
|
|
13
53
|
|
|
14
54
|
# -------------------------
|
|
15
|
-
#
|
|
55
|
+
# Per-node grammar state
|
|
16
56
|
# -------------------------
|
|
17
57
|
|
|
18
|
-
|
|
19
|
-
|
|
20
|
-
|
|
58
|
+
|
|
59
|
+
def _gram_state(nd: dict[str, Any]) -> dict[str, Any]:
    """Create or return the node grammar state stored under ``"_GRAM"``.

    Fields:
      - thol_open (bool): whether a THOL block is currently open
      - thol_len (int): running length of the open block
    """
    if "_GRAM" not in nd:
        nd["_GRAM"] = {"thol_open": False, "thol_len": 0}
    return nd["_GRAM"]
|
|
67
|
+
|
|
28
68
|
|
|
29
69
|
# -------------------------
|
|
30
|
-
#
|
|
70
|
+
# Canonical compatibilities (allowed next glyphs)
|
|
31
71
|
# -------------------------
|
|
32
|
-
CANON_COMPAT:
|
|
72
|
+
# Canonical compatibilities: for each glyph, the set of glyphs allowed to
# follow it (consulted by _check_compatibility and enforce_canonical_grammar).
CANON_COMPAT: dict[Glyph, set[Glyph]] = {
    # Start / opening
    Glyph.AL: {Glyph.EN, Glyph.RA, Glyph.NAV, Glyph.VAL, Glyph.UM},
    Glyph.EN: {Glyph.IL, Glyph.UM, Glyph.RA, Glyph.NAV},
    # Stabilisation / diffusion / coupling
    Glyph.IL: {Glyph.RA, Glyph.VAL, Glyph.UM, Glyph.SHA},
    Glyph.UM: {Glyph.RA, Glyph.IL, Glyph.VAL, Glyph.NAV},
    Glyph.RA: {Glyph.IL, Glyph.VAL, Glyph.UM, Glyph.NAV},
    Glyph.VAL: {Glyph.UM, Glyph.RA, Glyph.IL, Glyph.NAV},
    # Dissonance → transition → mutation
    Glyph.OZ: {Glyph.ZHIR, Glyph.NAV},
    Glyph.ZHIR: {Glyph.IL, Glyph.NAV},
    Glyph.NAV: {Glyph.OZ, Glyph.ZHIR, Glyph.RA, Glyph.IL, Glyph.UM},
    # Closures / latencies
    Glyph.SHA: {Glyph.AL, Glyph.EN},
    Glyph.NUL: {Glyph.AL, Glyph.IL},
    # Self-organising blocks
    Glyph.THOL: {
        Glyph.OZ,
        Glyph.ZHIR,
        Glyph.NAV,
        Glyph.RA,
        Glyph.IL,
        Glyph.UM,
        Glyph.SHA,
        Glyph.NUL,
    },
}
|
|
51
100
|
|
|
52
|
-
#
|
|
53
|
-
CANON_FALLBACK:
|
|
54
|
-
AL: EN,
|
|
101
|
+
# Canonical fallbacks when a transition is not allowed: maps the *previous*
# glyph to the replacement used when the candidate is incompatible with it
# (consulted via CANON_FALLBACK.get(prev, ...) in _check_compatibility).
CANON_FALLBACK: dict[Glyph, Glyph] = {
    Glyph.AL: Glyph.EN,
    Glyph.EN: Glyph.IL,
    Glyph.IL: Glyph.RA,
    Glyph.NAV: Glyph.RA,
    Glyph.NUL: Glyph.AL,
    Glyph.OZ: Glyph.ZHIR,
    Glyph.RA: Glyph.IL,
    Glyph.SHA: Glyph.AL,
    Glyph.THOL: Glyph.NAV,
    Glyph.UM: Glyph.RA,
    Glyph.VAL: Glyph.RA,
    Glyph.ZHIR: Glyph.IL,
}
|
|
56
116
|
|
|
57
|
-
# -------------------------
|
|
58
|
-
# Cierres T’HOL y precondiciones Z’HIR
|
|
59
|
-
# -------------------------
|
|
60
117
|
|
|
61
|
-
def
|
|
62
|
-
|
|
63
|
-
|
|
64
|
-
|
|
65
|
-
|
|
118
|
+
def _coerce_glyph(val: Any) -> Glyph | Any:
    """Return ``val`` as a :class:`Glyph` when possible, else unchanged."""
    try:
        coerced = Glyph(val)
    except (ValueError, TypeError):
        return val
    return coerced
|
|
124
|
+
|
|
66
125
|
|
|
126
|
+
def _glyph_fallback(cand_key: str, fallbacks: dict[str, Any]) -> Glyph | str:
    """Determine the fallback glyph for ``cand_key`` and return it coerced.

    A user-supplied entry in ``fallbacks`` wins; otherwise the canonical
    fallback for the coerced key is used, falling back to ``cand_key`` itself.
    """
    coerced_key = _coerce_glyph(cand_key)
    if isinstance(coerced_key, Glyph):
        default_fb = CANON_FALLBACK.get(coerced_key, cand_key)
    else:
        default_fb = cand_key
    return _coerce_glyph(fallbacks.get(cand_key, default_fb))
|
|
67
136
|
|
|
68
|
-
def _si(G, nd) -> float:
|
|
69
|
-
return clamp01(_get_attr(nd, ALIAS_SI, 0.5))
|
|
70
137
|
|
|
71
138
|
# -------------------------
|
|
72
|
-
#
|
|
139
|
+
# THOL closures and ZHIR preconditions
|
|
73
140
|
# -------------------------
|
|
74
141
|
|
|
75
|
-
def enforce_canonical_grammar(G, n, cand: str) -> str:
|
|
76
|
-
"""Valida/ajusta el glifo candidato según la gramática canónica.
|
|
77
142
|
|
|
78
|
-
|
|
79
|
-
|
|
80
|
-
|
|
81
|
-
- T’HOL[...]: obliga cierre con SH’A o NU’L cuando el campo se estabiliza
|
|
82
|
-
o se alcanza el largo del bloque; mantiene estado por nodo.
|
|
143
|
+
def get_norm(ctx: GrammarContext, key: str) -> float:
    """Retrieve a global normalisation value from ``ctx.norms``.

    Missing keys default to ``1.0``; a stored zero also yields ``1.0`` so
    callers can safely divide by the result.
    """
    value = float(ctx.norms.get(key, 1.0))
    return value if value else 1.0
|
|
83
146
|
|
|
84
|
-
Devuelve el glifo efectivo a aplicar.
|
|
85
|
-
"""
|
|
86
|
-
nd = G.nodes[n]
|
|
87
|
-
st = _gram_state(nd)
|
|
88
|
-
cfg = G.graph.get("GRAMMAR_CANON", DEFAULTS.get("GRAMMAR_CANON", {}))
|
|
89
147
|
|
|
90
|
-
|
|
91
|
-
|
|
148
|
+
def _norm_attr(ctx: GrammarContext, nd, attr_alias: str, norm_key: str) -> float:
    """Normalise ``attr_alias`` against the global maximum ``norm_key``.

    The attribute magnitude is divided by the graph-wide maximum and the
    result clamped to ``[0, 1]``.
    """
    magnitude = abs(get_attr(nd, attr_alias, 0.0))
    return clamp01(magnitude / get_norm(ctx, norm_key))
|
|
153
|
+
|
|
154
|
+
|
|
155
|
+
def _si(nd) -> float:
    """Return the node's SI attribute clamped to ``[0, 1]`` (default 0.5)."""
    return clamp01(get_attr(nd, ALIAS_SI, 0.5))
|
|
157
|
+
|
|
158
|
+
|
|
159
|
+
def _accel_norm(ctx: GrammarContext, nd) -> float:
    """Normalise acceleration using the global maximum.

    Reads the node's D2EPI attribute (via ``ALIAS_D2EPI``) and scales its
    magnitude by ``norms["accel_max"]``, clamped to ``[0, 1]``.
    """
    return _norm_attr(ctx, nd, ALIAS_D2EPI, "accel_max")
|
|
162
|
+
|
|
163
|
+
|
|
164
|
+
def _check_repeats(ctx: GrammarContext, n, cand: Glyph | str) -> Glyph | str:
    """Avoid recent repetitions according to ``ctx.cfg_soft``.

    When the repeat window is active, ``cand`` is listed in
    ``avoid_repeats`` and was applied within the window, a fallback glyph
    is returned instead; otherwise ``cand`` passes through unchanged.
    """
    nd = ctx.G.nodes[n]
    soft = ctx.cfg_soft
    window = int(soft.get("window", 0))
    avoid = set(soft.get("avoid_repeats", []))
    fallbacks = soft.get("fallbacks", {})
    cand_key = cand.value if isinstance(cand, Glyph) else str(cand)
    should_swap = (
        window > 0
        and cand_key in avoid
        and recent_glyph(nd, cand_key, window)
    )
    if should_swap:
        return _glyph_fallback(cand_key, fallbacks)
    return cand
|
|
175
|
+
|
|
176
|
+
|
|
177
|
+
def _maybe_force(
    ctx: GrammarContext,
    n,
    cand: Glyph | str,
    original: Glyph | str,
    accessor: Callable[[GrammarContext, dict[str, Any]], float],
    key: str,
) -> Glyph | str:
    """Restore ``original`` if ``accessor`` meets the ``key`` threshold.

    No-op when the candidate was not changed; otherwise the metric computed
    by ``accessor`` on the node is compared against ``ctx.cfg_soft[key]``
    (default ``0.60``).
    """
    if cand == original:
        return cand
    threshold = float(ctx.cfg_soft.get(key, 0.60))
    metric = accessor(ctx, ctx.G.nodes[n])
    return original if metric >= threshold else cand
|
|
93
192
|
|
|
94
|
-
|
|
95
|
-
|
|
193
|
+
|
|
194
|
+
def _check_oz_to_zhir(ctx: GrammarContext, n, cand: Glyph | str) -> Glyph | str:
    """Gate the OZ→ZHIR transition: mutation requires recent dissonance.

    If the candidate is ZHIR but no OZ was applied within the configured
    window (``zhir_requires_oz_window``) and the normalised |ΔNFR| is below
    ``zhir_dnfr_min``, the candidate is replaced by OZ so that dissonance
    precedes mutation. Any other candidate passes through unchanged.
    """
    nd = ctx.G.nodes[n]
    cand_glyph = _coerce_glyph(cand)
    if cand_glyph == Glyph.ZHIR:
        cfg = ctx.cfg_canon
        win = int(cfg.get("zhir_requires_oz_window", 3))
        dn_min = float(cfg.get("zhir_dnfr_min", 0.05))
        dnfr_max = get_norm(ctx, "dnfr_max")
        # Replace only when both conditions fail: no recent OZ and low |ΔNFR|.
        if (
            not recent_glyph(nd, Glyph.OZ, win)
            and normalize_dnfr(nd, dnfr_max) < dn_min
        ):
            return Glyph.OZ
    return cand
|
|
100
208
|
|
|
101
|
-
|
|
209
|
+
|
|
210
|
+
def _check_thol_closure(
    ctx: GrammarContext, n, cand: Glyph | str, st: dict[str, Any]
) -> Glyph | str:
    """Force closure of an open THOL block when its end conditions are met.

    While ``st["thol_open"]`` is true, every call increments
    ``st["thol_len"]`` (the state dict is mutated in place). The block is
    closed once the length reaches ``thol_max_len``, or reaches
    ``thol_min_len`` with normalised |ΔNFR| at or below ``thol_close_dnfr``.
    On closure the returned glyph is NUL when the node's sense index is at
    least ``si_high`` (default 0.66) and SHA otherwise; in all other cases
    ``cand`` is returned unchanged.
    """
    nd = ctx.G.nodes[n]
    if st.get("thol_open", False):
        # Grow the running block length; persisted via the shared state dict.
        st["thol_len"] = int(st.get("thol_len", 0)) + 1
        cfg = ctx.cfg_canon
        minlen = int(cfg.get("thol_min_len", 2))
        maxlen = int(cfg.get("thol_max_len", 6))
        close_dn = float(cfg.get("thol_close_dnfr", 0.15))
        dnfr_max = get_norm(ctx, "dnfr_max")
        if st["thol_len"] >= maxlen or (
            st["thol_len"] >= minlen
            and normalize_dnfr(nd, dnfr_max) <= close_dn
        ):
            return (
                Glyph.NUL
                if _si(nd) >= float(cfg.get("si_high", 0.66))
                else Glyph.SHA
            )
    return cand
|
|
231
|
+
|
|
121
232
|
|
|
233
|
+
def _check_compatibility(ctx: GrammarContext, n, cand: Glyph | str) -> Glyph | str:
    """Replace ``cand`` when it cannot follow the node's previous glyph.

    Looks at the last entry of the node's ``glyph_history``. When that
    previous glyph is canonical and ``cand`` is not in its ``CANON_COMPAT``
    set, the ``CANON_FALLBACK`` glyph for the previous glyph is returned
    instead. With no usable history, or a previous glyph missing from
    ``CANON_COMPAT``, the candidate passes through unchanged.
    """
    nd = ctx.G.nodes[n]
    hist = nd.get("glyph_history")
    prev = hist[-1] if hist else None
    prev_glyph = _coerce_glyph(prev)
    cand_glyph = _coerce_glyph(cand)
    if isinstance(prev_glyph, Glyph):
        allowed = CANON_COMPAT.get(prev_glyph)
        if allowed is None:
            # Canonical previous glyph without a compatibility entry: accept.
            return cand
        if isinstance(cand_glyph, Glyph):
            if cand_glyph not in allowed:
                return CANON_FALLBACK.get(prev_glyph, cand_glyph)
        else:
            # Non-canonical candidate after a canonical glyph: fall back.
            return CANON_FALLBACK.get(prev_glyph, cand)
    return cand
|
|
123
249
|
|
|
250
|
+
|
|
251
|
+
# -------------------------
|
|
252
|
+
# Core: enforce grammar on a candidate
|
|
253
|
+
# -------------------------
|
|
254
|
+
|
|
255
|
+
|
|
256
|
+
def enforce_canonical_grammar(
    G, n, cand: Glyph | str, ctx: Optional[GrammarContext] = None
) -> Glyph | str:
    """Validate and adjust a candidate glyph according to canonical grammar.

    Key rules:
      - Repeat window with forces based on |ΔNFR| and acceleration.
      - Transition compatibilities (TNFR path).
      - OZ→ZHIR: mutation requires recent dissonance or high |ΔNFR|.
      - THOL[...]: forces closure with SHA or NUL when the field stabilises
        or block length is reached; maintains per-node state.

    Parameters
    ----------
    G : graph-like
        Graph holding per-node state and grammar configuration.
    n : hashable
        Node identifier within ``G``.
    cand : Glyph | str
        Candidate glyph (enum member or its string value).
    ctx : GrammarContext, optional
        Precomputed context; built from ``G`` when omitted.

    Returns
    -------
    Glyph | str
        The effective glyph to apply. A ``str`` input yields a ``str``
        result; otherwise a ``Glyph`` is returned when coercion succeeds.
    """
    if ctx is None:
        ctx = GrammarContext.from_graph(G)

    nd = ctx.G.nodes[n]
    st = _gram_state(nd)

    raw_cand = cand
    cand = _coerce_glyph(cand)
    input_was_str = isinstance(raw_cand, str)

    # 0) If glyphs outside the alphabet arrive, leave untouched
    if not isinstance(cand, Glyph) or cand not in CANON_COMPAT:
        return raw_cand if input_was_str else cand

    original = cand
    cand = _check_repeats(ctx, n, cand)

    # Named helper instead of a lambda bound to a name (PEP 8 E731):
    # normalised |ΔNFR| against the graph-wide maximum.
    def _dnfr_norm(ctx: GrammarContext, nd) -> float:
        return normalize_dnfr(nd, get_norm(ctx, "dnfr_max"))

    cand = _maybe_force(ctx, n, cand, original, _dnfr_norm, "force_dnfr")
    cand = _maybe_force(ctx, n, cand, original, _accel_norm, "force_accel")
    cand = _check_oz_to_zhir(ctx, n, cand)
    cand = _check_thol_closure(ctx, n, cand, st)
    cand = _check_compatibility(ctx, n, cand)

    # Preserve the caller's representation: str in → str out.
    coerced_final = _coerce_glyph(cand)
    if input_was_str:
        if isinstance(coerced_final, Glyph):
            return coerced_final.value
        return str(cand)
    return coerced_final if isinstance(coerced_final, Glyph) else cand
|
|
300
|
+
|
|
301
|
+
|
|
124
302
|
# -------------------------
|
|
125
|
-
# Post-
|
|
303
|
+
# Post-selection: update grammar state
|
|
126
304
|
# -------------------------
|
|
127
305
|
|
|
128
|
-
|
|
306
|
+
|
|
307
|
+
def on_applied_glyph(G, n, applied: str) -> None:
    """Update the node's grammar state after ``applied`` was executed.

    Opens a THOL block (resetting its length) when THOL is applied, and
    closes it when SHA or NUL is applied; other glyphs leave the state
    untouched.

    NOTE(review): ``applied`` is annotated ``str`` but compared against
    ``Glyph`` members; this relies on the enum comparing equal to its
    string value — confirm against the :class:`Glyph` definition.
    """
    nd = G.nodes[n]
    st = _gram_state(nd)
    if applied == Glyph.THOL:
        st["thol_open"] = True
        st["thol_len"] = 0
    elif applied in (Glyph.SHA, Glyph.NUL):
        st["thol_open"] = False
        st["thol_len"] = 0
|
|
137
|
-
|
|
138
|
-
pass
|
|
316
|
+
|
|
139
317
|
|
|
140
318
|
# -------------------------
|
|
141
|
-
#
|
|
319
|
+
# Direct application with canonical grammar
|
|
142
320
|
# -------------------------
|
|
143
321
|
|
|
144
|
-
def select_and_apply_with_grammar(G, n, selector, window: int) -> None:
|
|
145
|
-
"""Aplica gramática canónica sobre la propuesta del selector.
|
|
146
322
|
|
|
147
|
-
|
|
148
|
-
|
|
149
|
-
|
|
323
|
+
def apply_glyph_with_grammar(
    G,
    nodes: Optional[Iterable[Any]],
    glyph: Glyph | str,
    window: Optional[int] = None,
) -> None:
    """Apply ``glyph`` to ``nodes`` enforcing the canonical grammar.

    ``nodes`` may be a ``NodeView`` or any iterable; ``None`` selects all
    nodes of ``G``. The iterable is consumed directly to avoid unnecessary
    materialisation; callers must materialise if they need indexing. When
    ``window`` is ``None`` the graph's ``GLYPH_HYSTERESIS_WINDOW`` parameter
    is used.
    """
    if window is None:
        window = get_param(G, "GLYPH_HYSTERESIS_WINDOW")

    glyph_str = glyph.value if isinstance(glyph, Glyph) else str(glyph)
    targets = G.nodes() if nodes is None else nodes
    ctx = GrammarContext.from_graph(G)
    for node in targets:
        effective = enforce_canonical_grammar(G, node, glyph_str, ctx)
        apply_glyph(G, node, effective, window=window)
        on_applied_glyph(G, node, effective)
|
tnfr/graph_utils.py
ADDED
|
@@ -0,0 +1,84 @@
|
|
|
1
|
+
"""Utilities for graph-level bookkeeping.
|
|
2
|
+
|
|
3
|
+
This module centralises helpers that operate on the metadata stored inside
|
|
4
|
+
graph objects. Besides flagging ΔNFR preparation caches it also exposes
|
|
5
|
+
lightweight adapters to obtain the canonical ``graph`` mapping and to read
|
|
6
|
+
validated configuration dictionaries.
|
|
7
|
+
"""
|
|
8
|
+
|
|
9
|
+
from __future__ import annotations
|
|
10
|
+
|
|
11
|
+
import warnings
|
|
12
|
+
from types import MappingProxyType
|
|
13
|
+
from typing import Any, Mapping
|
|
14
|
+
|
|
15
|
+
__all__ = (
|
|
16
|
+
"get_graph",
|
|
17
|
+
"get_graph_mapping",
|
|
18
|
+
"mark_dnfr_prep_dirty",
|
|
19
|
+
"supports_add_edge",
|
|
20
|
+
)
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
def get_graph(obj: Any) -> Any:
    """Return ``obj.graph`` when present or ``obj`` otherwise."""
    try:
        return obj.graph
    except AttributeError:
        return obj
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
def get_graph_mapping(
    G: Any, key: str, warn_msg: str
) -> Mapping[str, Any] | None:
    """Return an immutable view of the mapping stored under ``key`` in ``G``.

    ``G`` is resolved to its ``graph`` attribute when present. Returns
    ``None`` when the resolved object has no ``get`` method, the key is
    absent, or the stored value is not a mapping — in that last case
    ``warn_msg`` is emitted as a :class:`UserWarning`. A found mapping is
    wrapped in :class:`types.MappingProxyType` to guard against accidental
    mutation.
    """
    graph = getattr(G, "graph", G)
    getter = getattr(graph, "get", None)
    if getter is None:
        return None

    stored = getter(key)
    if stored is None:
        return None
    if isinstance(stored, Mapping):
        return MappingProxyType(stored)
    warnings.warn(warn_msg, UserWarning, stacklevel=2)
    return None
|
|
51
|
+
|
|
52
|
+
|
|
53
|
+
def mark_dnfr_prep_dirty(G: Any) -> None:
    """Flag ΔNFR preparation data as stale.

    Parameters
    ----------
    G : Any
        Graph-like object. Its ``graph`` mapping (or ``G`` itself when no
        ``graph`` attribute exists) receives the ``"_dnfr_prep_dirty"`` flag.

    Returns
    -------
    None
        This function mutates ``G`` in place.
    """
    target = getattr(G, "graph", G)
    target["_dnfr_prep_dirty"] = True
|
|
69
|
+
|
|
70
|
+
|
|
71
|
+
def supports_add_edge(graph: Any) -> bool:
    """Return ``True`` if ``graph`` exposes an ``add_edge`` attribute.

    Parameters
    ----------
    graph : Any
        Object representing a graph.

    Returns
    -------
    bool
        ``True`` when ``graph`` has an ``add_edge`` attribute; ``False``
        otherwise. Only presence is checked, not callability.
    """
    return hasattr(graph, "add_edge")
|
tnfr/helpers/__init__.py
ADDED
|
@@ -0,0 +1,71 @@
|
|
|
1
|
+
"""Curated high-level helpers exposed by :mod:`tnfr.helpers`.
|
|
2
|
+
|
|
3
|
+
The module is intentionally small and surfaces utilities that are stable for
|
|
4
|
+
external use, covering data preparation, glyph history management, and graph
|
|
5
|
+
cache invalidation.
|
|
6
|
+
"""
|
|
7
|
+
|
|
8
|
+
from __future__ import annotations
|
|
9
|
+
|
|
10
|
+
from ..cache import (
|
|
11
|
+
EdgeCacheManager,
|
|
12
|
+
cached_node_list,
|
|
13
|
+
cached_nodes_and_A,
|
|
14
|
+
edge_version_cache,
|
|
15
|
+
edge_version_update,
|
|
16
|
+
ensure_node_index_map,
|
|
17
|
+
ensure_node_offset_map,
|
|
18
|
+
increment_edge_version,
|
|
19
|
+
node_set_checksum,
|
|
20
|
+
stable_json,
|
|
21
|
+
)
|
|
22
|
+
from ..graph_utils import get_graph, get_graph_mapping, mark_dnfr_prep_dirty
|
|
23
|
+
from .numeric import (
|
|
24
|
+
angle_diff,
|
|
25
|
+
clamp,
|
|
26
|
+
clamp01,
|
|
27
|
+
kahan_sum_nd,
|
|
28
|
+
)
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
def __getattr__(name: str):
    """Lazily resolve glyph-history re-exports (PEP 562 module ``__getattr__``).

    Imports :mod:`tnfr.glyph_history` only on first access to one of the
    names listed in ``_GLYPH_HISTORY_EXPORTS``, then caches the attribute
    in module globals so subsequent lookups bypass this hook entirely.
    Raises :class:`AttributeError` for any other missing name.
    """
    if name in _GLYPH_HISTORY_EXPORTS:
        from .. import glyph_history as _glyph_history

        value = getattr(_glyph_history, name)
        globals()[name] = value
        return value
    raise AttributeError(f"module {__name__!r} has no attribute {name!r}")
|
|
39
|
+
|
|
40
|
+
__all__ = (
|
|
41
|
+
"EdgeCacheManager",
|
|
42
|
+
"angle_diff",
|
|
43
|
+
"cached_node_list",
|
|
44
|
+
"cached_nodes_and_A",
|
|
45
|
+
"clamp",
|
|
46
|
+
"clamp01",
|
|
47
|
+
"edge_version_cache",
|
|
48
|
+
"edge_version_update",
|
|
49
|
+
"ensure_node_index_map",
|
|
50
|
+
"ensure_node_offset_map",
|
|
51
|
+
"get_graph",
|
|
52
|
+
"get_graph_mapping",
|
|
53
|
+
"increment_edge_version",
|
|
54
|
+
"kahan_sum_nd",
|
|
55
|
+
"mark_dnfr_prep_dirty",
|
|
56
|
+
"node_set_checksum",
|
|
57
|
+
"stable_json",
|
|
58
|
+
"count_glyphs",
|
|
59
|
+
"ensure_history",
|
|
60
|
+
"last_glyph",
|
|
61
|
+
"push_glyph",
|
|
62
|
+
"recent_glyph",
|
|
63
|
+
)
|
|
64
|
+
|
|
65
|
+
_GLYPH_HISTORY_EXPORTS = (
|
|
66
|
+
"count_glyphs",
|
|
67
|
+
"ensure_history",
|
|
68
|
+
"last_glyph",
|
|
69
|
+
"push_glyph",
|
|
70
|
+
"recent_glyph",
|
|
71
|
+
)
|
tnfr/helpers/numeric.py
ADDED
|
@@ -0,0 +1,87 @@
|
|
|
1
|
+
"""Numeric helper functions and compensated summation utilities."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from collections.abc import Iterable, Sequence
|
|
6
|
+
import math
|
|
7
|
+
|
|
8
|
+
__all__ = (
|
|
9
|
+
"clamp",
|
|
10
|
+
"clamp01",
|
|
11
|
+
"within_range",
|
|
12
|
+
"similarity_abs",
|
|
13
|
+
"kahan_sum_nd",
|
|
14
|
+
"angle_diff",
|
|
15
|
+
)
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
def clamp(x: float, a: float, b: float) -> float:
    """Return ``x`` clamped to the ``[a, b]`` interval."""
    upper_bounded = min(b, x)
    return max(a, upper_bounded)
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
def clamp01(x: float) -> float:
    """Clamp ``x`` to the ``[0,1]`` interval after coercing to ``float``."""
    value = float(x)
    return max(0.0, min(1.0, value))
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
def within_range(val: float, lower: float, upper: float, tol: float = 1e-9) -> bool:
    """Return ``True`` if ``val`` lies in ``[lower, upper]`` within ``tol``.

    The comparison uses absolute differences instead of :func:`math.isclose`.
    """
    v = float(val)
    if lower <= v <= upper:
        return True
    return abs(v - lower) <= tol or abs(v - upper) <= tol
|
|
36
|
+
|
|
37
|
+
|
|
38
|
+
def _norm01(x: float, lo: float, hi: float) -> float:
    """Normalize ``x`` to the unit interval given bounds.

    ``lo`` and ``hi`` delimit the original value range. When ``hi`` is not
    greater than ``lo`` the function returns ``0.0`` to avoid division by
    zero. The result is clamped to ``[0,1]``.
    """
    if hi <= lo:
        return 0.0
    ratio = (float(x) - float(lo)) / (float(hi) - float(lo))
    return max(0.0, min(1.0, ratio))
|
|
49
|
+
|
|
50
|
+
|
|
51
|
+
def similarity_abs(a: float, b: float, lo: float, hi: float) -> float:
    """Return absolute similarity of ``a`` and ``b`` over ``[lo, hi]``.

    Computes ``1`` minus the absolute difference between ``a`` and ``b``
    normalized to the span ``hi - lo`` and clamped to ``[0, 1]``. A
    non-positive span yields ``1.0`` (no meaningful scale to compare on).
    """
    span = hi - lo
    if span <= 0.0:
        return 1.0
    diff = abs(float(a) - float(b))
    normalized = max(0.0, min(1.0, diff / float(span)))
    return 1.0 - normalized
|
|
60
|
+
|
|
61
|
+
def kahan_sum_nd(
    values: Iterable[Sequence[float]], dims: int
) -> tuple[float, ...]:
    """Return compensated sums of ``values`` with ``dims`` components.

    Each component is summed independently using the Kahan–Babuška
    (Neumaier) algorithm, which carries a per-component correction term to
    reduce floating point rounding error.

    Raises :class:`ValueError` when ``dims`` is less than 1.
    """
    if dims < 1:
        raise ValueError("dims must be >= 1")
    sums = [0.0] * dims
    corrections = [0.0] * dims
    for row in values:
        for i in range(dims):
            value = row[i]
            updated = sums[i] + value
            # Neumaier: add the rounding residue of whichever operand was
            # larger in magnitude to the correction term.
            if abs(sums[i]) >= abs(value):
                corrections[i] += (sums[i] - updated) + value
            else:
                corrections[i] += (value - updated) + sums[i]
            sums[i] = updated
    return tuple(float(sums[i] + corrections[i]) for i in range(dims))
|
|
83
|
+
|
|
84
|
+
|
|
85
|
+
def angle_diff(a: float, b: float) -> float:
    """Return the minimal difference between two angles in radians."""
    delta = float(a) - float(b)
    return (delta + math.pi) % math.tau - math.pi
|