tnfr 3.0.3__py3-none-any.whl → 4.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of tnfr has been flagged as potentially problematic.
- tnfr/__init__.py +57 -56
- tnfr/cli.py +177 -0
- tnfr/constants.py +41 -11
- tnfr/dynamics.py +87 -31
- tnfr/gamma.py +91 -0
- tnfr/grammar.py +149 -0
- tnfr/helpers.py +43 -15
- tnfr/main.py +20 -10
- tnfr/metrics.py +211 -0
- tnfr/observers.py +19 -7
- tnfr/ontosim.py +12 -9
- tnfr/operators.py +23 -6
- tnfr/presets.py +24 -0
- tnfr/program.py +168 -0
- tnfr/scenarios.py +28 -0
- tnfr/sense.py +215 -0
- tnfr/trace.py +145 -0
- tnfr/types.py +17 -0
- tnfr-4.0.0.dist-info/METADATA +101 -0
- tnfr-4.0.0.dist-info/RECORD +24 -0
- tnfr-4.0.0.dist-info/entry_points.txt +2 -0
- tnfr-3.0.3.dist-info/licenses/LICENSE.txt → tnfr-4.0.0.dist-info/licenses/LICENSE.md +1 -1
- tnfr-3.0.3.dist-info/METADATA +0 -35
- tnfr-3.0.3.dist-info/RECORD +0 -13
- {tnfr-3.0.3.dist-info → tnfr-4.0.0.dist-info}/WHEEL +0 -0
- {tnfr-3.0.3.dist-info → tnfr-4.0.0.dist-info}/top_level.txt +0 -0
tnfr/grammar.py
ADDED
@@ -0,0 +1,149 @@
+from __future__ import annotations
+from typing import Dict, Any, Set
+
+from .constants import (
+    DEFAULTS,
+    ALIAS_SI, ALIAS_DNFR, ALIAS_EPI,
+)
+from .helpers import _get_attr, clamp01, reciente_glifo
+from collections import deque
+
+# Nominal glyphs (to avoid typos)
+AL = "A’L"; EN = "E’N"; IL = "I’L"; OZ = "O’Z"; UM = "U’M"; RA = "R’A"; SHA = "SH’A"; VAL = "VA’L"; NUL = "NU’L"; THOL = "T’HOL"; ZHIR = "Z’HIR"; NAV = "NA’V"; REMESH = "RE’MESH"
+
+# -------------------------
+# Per-node grammar state
+# -------------------------
+
+def _gram_state(nd: Dict[str, Any]) -> Dict[str, Any]:
+    """Create/return the node's grammar state.
+    Fields:
+    - thol_open (bool)
+    - thol_len (int)
+    """
+    st = nd.setdefault("_GRAM", {"thol_open": False, "thol_len": 0})
+    st.setdefault("thol_open", False)
+    st.setdefault("thol_len", 0)
+    return st
+
+# -------------------------
+# Canonical compatibilities (allowed successors)
+# -------------------------
+CANON_COMPAT: Dict[str, Set[str]] = {
+    # Start / opening
+    AL: {EN, RA, NAV, VAL, UM},
+    EN: {IL, UM, RA, NAV},
+    # Stabilization / diffusion / coupling
+    IL: {RA, VAL, UM, SHA},
+    UM: {RA, IL, VAL, NAV},
+    RA: {IL, VAL, UM, NAV},
+    VAL: {UM, RA, IL, NAV},
+    # Dissonance → transition → mutation
+    OZ: {ZHIR, NAV},
+    ZHIR: {IL, NAV},
+    NAV: {OZ, ZHIR, RA, IL, UM},
+    # Closures / latencies
+    SHA: {AL, EN},
+    NUL: {AL, IL},
+    # Self-organizing blocks
+    THOL: {OZ, ZHIR, NAV, RA, IL, UM, SHA, NUL},
+}
+
+# Canonical fallbacks when a transition is not allowed
+CANON_FALLBACK: Dict[str, str] = {
+    AL: EN, EN: IL, IL: RA, UM: RA, RA: IL, VAL: RA, OZ: ZHIR, ZHIR: IL, NAV: RA, SHA: AL, NUL: AL, THOL: NAV,
+}
+
+# -------------------------
+# T’HOL closures and Z’HIR preconditions
+# -------------------------
+
+def _dnfr_norm(G, nd) -> float:
+    # Robust normalizer: uses the max |ΔNFR| history stored by dynamics (if present)
+    norms = G.graph.get("_sel_norms") or {}
+    dmax = float(norms.get("dnfr_max", 1.0)) or 1.0
+    return clamp01(abs(_get_attr(nd, ALIAS_DNFR, 0.0)) / dmax)
+
+
+def _si(G, nd) -> float:
+    return clamp01(_get_attr(nd, ALIAS_SI, 0.5))
+
+# -------------------------
+# Core: enforce the grammar on a candidate
+# -------------------------
+
+def enforce_canonical_grammar(G, n, cand: str) -> str:
+    """Validate/adjust the candidate glyph against the canonical grammar.
+
+    Key rules:
+    - Glyph transition compatibilities (TNFR traversal).
+    - O’Z→Z’HIR: mutation requires recent dissonance or high |ΔNFR|.
+    - T’HOL[...]: forces a closure with SH’A or NU’L when the field stabilizes
+      or the block length is reached; keeps per-node state.
+
+    Returns the effective glyph to apply.
+    """
+    nd = G.nodes[n]
+    st = _gram_state(nd)
+    cfg = G.graph.get("GRAMMAR_CANON", DEFAULTS.get("GRAMMAR_CANON", {}))
+
+    # 0) Glyphs outside the alphabet are left untouched
+    if cand not in CANON_COMPAT:
+        return cand
+
+    # 1) O’Z→Z’HIR precondition: mutation requires recent dissonance or a strong field
+    if cand == ZHIR:
+        win = int(cfg.get("zhir_requires_oz_window", 3))
+        dn_min = float(cfg.get("zhir_dnfr_min", 0.05))
+        if not reciente_glifo(nd, OZ, win) and _dnfr_norm(G, nd) < dn_min:
+            cand = OZ  # force a pass through O’Z
+
+    # 2) Inside a T’HOL block: enforce the mandatory closure
+    if st.get("thol_open", False):
+        st["thol_len"] = int(st.get("thol_len", 0))
+        st["thol_len"] += 1
+        minlen = int(cfg.get("thol_min_len", 2))
+        maxlen = int(cfg.get("thol_max_len", 6))
+        close_dn = float(cfg.get("thol_close_dnfr", 0.15))
+        if st["thol_len"] >= maxlen or (st["thol_len"] >= minlen and _dnfr_norm(G, nd) <= close_dn):
+            cand = NUL if _si(G, nd) >= float(cfg.get("si_high", 0.66)) else SHA
+
+    # 3) Compatibilities: the previous glyph restricts the next one
+    prev = None
+    hist = nd.get("hist_glifos")
+    if hist:
+        try:
+            prev = list(hist)[-1]
+        except Exception:
+            prev = None
+    if prev in CANON_COMPAT and cand not in CANON_COMPAT[prev]:
+        cand = CANON_FALLBACK.get(prev, cand)
+
+    return cand
+
+# -------------------------
+# Post-selection: update grammar state
+# -------------------------
+
+def on_applied_glifo(G, n, applied: str) -> None:
+    nd = G.nodes[n]
+    st = _gram_state(nd)
+    if applied == THOL:
+        st["thol_open"] = True
+        st["thol_len"] = 0
+    elif applied in (SHA, NUL):
+        st["thol_open"] = False
+        st["thol_len"] = 0
+    else:
+        pass
+
+# -------------------------
+# Integration with dynamics.step: selection+application helper
+# -------------------------
+
+def select_and_apply_with_grammar(G, n, selector, window: int) -> None:
+    from .operators import aplicar_glifo
+    cand = selector(G, n)
+    cand = enforce_canonical_grammar(G, n, cand)
+    aplicar_glifo(G, n, cand, window=window)
+    on_applied_glifo(G, n, cand)
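The compatibility step in enforce_canonical_grammar reduces to a dictionary lookup plus a fallback. Below is a minimal standalone sketch of that step (step 3 above); COMPAT and FALLBACK here are a tiny illustrative subset, not the package's full CANON_COMPAT/CANON_FALLBACK tables.

# Standalone sketch of the successor check in enforce_canonical_grammar
# (step 3): if the previous glyph does not allow the candidate, fall back
# to a canonical replacement for that glyph.
from __future__ import annotations
from typing import Dict, Optional, Set

COMPAT: Dict[str, Set[str]] = {"A’L": {"E’N", "R’A"}, "O’Z": {"Z’HIR", "NA’V"}}
FALLBACK: Dict[str, str] = {"A’L": "E’N", "O’Z": "Z’HIR"}

def filter_candidate(prev: Optional[str], cand: str) -> str:
    """Keep cand if it is allowed after prev; otherwise use prev's fallback."""
    if prev in COMPAT and cand not in COMPAT[prev]:
        return FALLBACK.get(prev, cand)
    return cand

print(filter_candidate("A’L", "R’A"))   # allowed after A’L → R’A
print(filter_candidate("A’L", "SH’A"))  # not allowed after A’L → falls back to E’N
print(filter_candidate(None, "SH’A"))   # no history → candidate passes through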
tnfr/helpers.py
CHANGED
@@ -4,37 +4,44 @@ helpers.py — TNFR canónica
 Cross-cutting utilities + computation of the Sense Index (Si).
 """
 from __future__ import annotations
-from typing import Iterable, Dict, Any
+from typing import Iterable, Dict, Any
 import math
 from collections import deque
+from statistics import fmean, StatisticsError
 
 try:
     import networkx as nx  # only for typing
 except Exception:  # pragma: no cover
     nx = None  # type: ignore
 
-from constants import DEFAULTS, ALIAS_VF, ALIAS_THETA, ALIAS_DNFR, ALIAS_EPI, ALIAS_SI
+from .constants import DEFAULTS, ALIAS_VF, ALIAS_THETA, ALIAS_DNFR, ALIAS_EPI, ALIAS_SI
 
 # -------------------------
 # Numeric utilities
 # -------------------------
 
 def clamp(x: float, a: float, b: float) -> float:
+    """Constrain ``x`` to the closed interval [a, b]."""
     return a if x < a else b if x > b else x
 
 
 def clamp_abs(x: float, m: float) -> float:
+    """Limit ``x`` to the symmetric range [-m, m] using ``abs(m)``."""
     m = abs(m)
     return clamp(x, -m, m)
 
 
 def clamp01(x: float) -> float:
+    """Clip ``x`` to the [0, 1] band."""
     return clamp(x, 0.0, 1.0)
 
 
 def list_mean(xs: Iterable[float], default: float = 0.0) -> float:
-    xs
-
+    """Arithmetic mean, or ``default`` if ``xs`` is empty."""
+    try:
+        return fmean(xs)
+    except StatisticsError:
+        return default
 
 
 def _wrap_angle(a: float) -> float:
@@ -74,17 +81,14 @@ def _set_attr(d, aliases, value: float) -> None:
 # -------------------------
 
 def media_vecinal(G, n, aliases: Iterable[str], default: float = 0.0) -> float:
-
-    for v in G.neighbors(n)
-        vals.append(_get_attr(G.nodes[v], aliases, default))
+    """Mean of the attribute given by ``aliases`` over the neighbors of ``n``."""
+    vals = (_get_attr(G.nodes[v], aliases, default) for v in G.neighbors(n))
     return list_mean(vals, default)
 
 
 def fase_media(G, n) -> float:
     """Circular mean of the neighbors' phases."""
-
-    x = 0.0
-    y = 0.0
+    x = y = 0.0
     count = 0
     for v in G.neighbors(n):
         th = _get_attr(G.nodes[v], ALIAS_THETA, 0.0)
@@ -93,7 +97,7 @@ def fase_media(G, n) -> float:
         count += 1
     if count == 0:
         return _get_attr(G.nodes[n], ALIAS_THETA, 0.0)
-    return math.atan2(y /
+    return math.atan2(y / count, x / count)
 
 
 # -------------------------
@@ -101,16 +105,24 @@ def fase_media(G, n) -> float:
 # -------------------------
 
 def push_glifo(nd: Dict[str, Any], glifo: str, window: int) -> None:
+    """Append ``glifo`` to the node history, bounded to ``window`` entries."""
    hist = nd.setdefault("hist_glifos", deque(maxlen=window))
    hist.append(str(glifo))
 
 
 def reciente_glifo(nd: Dict[str, Any], glifo: str, ventana: int) -> bool:
+    """Report whether ``glifo`` appeared within the last ``ventana`` emissions."""
     hist = nd.get("hist_glifos")
     if not hist:
         return False
-
-
+    gl = str(glifo)
+    for g in reversed(hist):
+        if g == gl:
+            return True
+        ventana -= 1
+        if ventana <= 0:
+            break
+    return False
 
 # -------------------------
 # Callbacks Γ(R)
@@ -128,10 +140,26 @@ def _ensure_callbacks(G):
     cbs.setdefault(k, [])
     return cbs
 
-def register_callback(
-
+def register_callback(
+    G,
+    event: str | None = None,
+    func=None,
+    *,
+    when: str | None = None,
+    name: str | None = None,
+):
+    """Register ``func`` as a callback for the given ``event``.
+
+    Accepts both the positional form ``register_callback(G, "after_step", fn)``
+    and the keyword form ``register_callback(G, when="after_step", func=fn)``.
+    The ``name`` parameter is accepted for compatibility but is currently
+    unused.
+    """
+    event = event or when
     if event not in ("before_step", "after_step", "on_remesh"):
         raise ValueError(f"Evento desconocido: {event}")
+    if func is None:
+        raise TypeError("func es obligatorio")
     cbs = _ensure_callbacks(G)
     cbs[event].append(func)
     return func
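register_callback now accepts either a positional event or the when=/func= keyword pair. The following is a small standalone mimic of that dispatch, runnable without the package; a plain dict stands in for G.graph, so names and structure here are illustrative only.

# Standalone mimic of the two call forms register_callback accepts after this
# change: positional ("after_step", fn) and keyword (when=..., func=...).
from __future__ import annotations

def register_callback(graph: dict, event: str | None = None, func=None, *,
                      when: str | None = None, name: str | None = None):
    event = event or when
    if event not in ("before_step", "after_step", "on_remesh"):
        raise ValueError(f"Unknown event: {event}")
    if func is None:
        raise TypeError("func is required")
    graph.setdefault("callbacks", {}).setdefault(event, []).append(func)
    return func

g: dict = {}
register_callback(g, "after_step", lambda G, ctx=None: None)            # positional form
register_callback(g, when="after_step", func=lambda G, ctx=None: None)  # keyword form
print(len(g["callbacks"]["after_step"]))  # 2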
tnfr/main.py
CHANGED
@@ -1,7 +1,11 @@
 from __future__ import annotations
-import argparse, sys
-import networkx as nx
-from . import preparar_red, run, __version__
+import argparse, sys
+import networkx as nx
+from . import preparar_red, run, __version__
+from .constants import merge_overrides, attach_defaults
+from .sense import register_sigma_callback
+from .metrics import register_metrics_callbacks
+from .trace import register_trace
 
 def main(argv: list[str] | None = None) -> None:
     p = argparse.ArgumentParser(
@@ -15,13 +19,19 @@ def main(argv: list[str] | None = None) -> None:
     p.add_argument("--observer", action="store_true", help="adjunta observador estándar")
     args = p.parse_args(argv)
 
-    if args.version:
-        print(__version__)
-        return
-
-    G = nx.erdos_renyi_graph(args.n, args.p)
-    preparar_red(G, ATTACH_STD_OBSERVER=bool(args.observer))
-
+    if args.version:
+        print(__version__)
+        return
+
+    G = nx.erdos_renyi_graph(args.n, args.p)
+    preparar_red(G, ATTACH_STD_OBSERVER=bool(args.observer))
+    attach_defaults(G)
+    register_sigma_callback(G)
+    register_metrics_callbacks(G)
+    register_trace(G)
+    # Example: enable linear Γi(R) with β=0.2 and R0=0.5
+    merge_overrides(G, GAMMA={"type": "kuramoto_linear", "beta": 0.2, "R0": 0.5})
+    run(G, args.steps)
 
     h = G.graph.get("history", {})
     C = h.get("C_steps", [])[-1] if h.get("C_steps") else None
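For programmatic use, the wiring main() now performs can be reproduced directly. This is a hedged sketch assuming tnfr 4.0.0 and networkx are installed; the graph size, edge probability and the GAMMA override are the illustrative values used in main.py above, not required settings.

# Library-style wiring mirroring what main() now does, for use outside the CLI.
import networkx as nx
from tnfr import preparar_red, run
from tnfr.constants import attach_defaults, merge_overrides
from tnfr.sense import register_sigma_callback
from tnfr.metrics import register_metrics_callbacks
from tnfr.trace import register_trace

G = nx.erdos_renyi_graph(50, 0.15)         # any substrate graph works here
preparar_red(G, ATTACH_STD_OBSERVER=True)  # defaults + standard observer
attach_defaults(G)
register_sigma_callback(G)
register_metrics_callbacks(G)
register_trace(G)
merge_overrides(G, GAMMA={"type": "kuramoto_linear", "beta": 0.2, "R0": 0.5})
run(G, 100)                                # number of steps

history = G.graph.get("history", {})       # per-step series land here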
tnfr/metrics.py
ADDED
@@ -0,0 +1,211 @@
+from __future__ import annotations
+from typing import Dict, Any, List, Tuple
+from collections import defaultdict, Counter
+import statistics
+
+from .constants import DEFAULTS
+from .helpers import _get_attr, clamp01, register_callback
+from .sense import GLYPHS_CANONICAL
+
+# -------------
+# DEFAULTS
+# -------------
+DEFAULTS.setdefault("METRICS", {
+    "enabled": True,
+    "save_by_node": True,      # stores Tg per node (heavier)
+    "normalize_series": False  # glyphogram normalized to a fraction per step
+})
+
+# -------------
+# Internal utilities
+# -------------
+
+def _ensure_history(G):
+    if "history" not in G.graph:
+        G.graph["history"] = {}
+    return G.graph["history"]
+
+
+def _last_glifo(nd: Dict[str, Any]) -> str | None:
+    hist = nd.get("hist_glifos")
+    if not hist:
+        return None
+    try:
+        return list(hist)[-1]
+    except Exception:
+        return None
+
+
+# -------------
+# Per-node state for Tg
+# -------------
+
+def _tg_state(nd: Dict[str, Any]) -> Dict[str, Any]:
+    """Internal per-node structure to accumulate run times per glyph.
+    Fields: curr (current glyph), run (time accumulated in the current glyph)
+    """
+    st = nd.setdefault("_Tg", {"curr": None, "run": 0.0})
+    st.setdefault("curr", None)
+    st.setdefault("run", 0.0)
+    return st
+
+
+# -------------
+# Main callback: update metrics per step
+# -------------
+
+def _metrics_step(G, *args, **kwargs):
+    """Update operational TNFR metrics per step.
+
+    - Tg (glyphic time): sum of runs per glyph (global and per node).
+    - Latency index: fraction of nodes in SH’A.
+    - Glyphogram: count or fraction per glyph in this step.
+
+    All results are stored in G.graph['history'].
+    """
+    if not G.graph.get("METRICS", DEFAULTS.get("METRICS", {})).get("enabled", True):
+        return
+
+    hist = _ensure_history(G)
+    dt = float(G.graph.get("DT", 1.0))
+    t = float(G.graph.get("_t", 0.0))
+
+    # --- Glyphogram (counts per glyph this step) ---
+    counts = Counter()
+
+    # --- Latency index: proportion of nodes in SH’A ---
+    n_total = 0
+    n_latent = 0
+
+    # --- Tg: accumulate runs per node ---
+    save_by_node = bool(G.graph.get("METRICS", DEFAULTS["METRICS"]).get("save_by_node", True))
+    tg_total = hist.setdefault("Tg_total", defaultdict(float))  # total time per glyph (global)
+    tg_by_node = hist.setdefault("Tg_by_node", {})  # node → {glyph: [runs, ...]}
+
+    for n in G.nodes():
+        nd = G.nodes[n]
+        g = _last_glifo(nd)
+        if not g:
+            continue
+
+        n_total += 1
+        if g == "SH’A":
+            n_latent += 1
+
+        counts[g] += 1
+
+        st = _tg_state(nd)
+        # If the glyph stays the same, accumulate; on change, close the run
+        if st["curr"] is None:
+            st["curr"] = g
+            st["run"] = dt
+        elif g == st["curr"]:
+            st["run"] += dt
+        else:
+            # close the previous run
+            prev = st["curr"]
+            dur = float(st["run"])
+            tg_total[prev] += dur
+            if save_by_node:
+                rec = tg_by_node.setdefault(n, defaultdict(list))
+                rec[prev].append(dur)
+            # restart the run
+            st["curr"] = g
+            st["run"] = dt
+
+    # At the end of the step the current run is left open: it closes when the glyph changes.
+
+    # Store the glyphogram (raw and normalized counts)
+    norm = bool(G.graph.get("METRICS", DEFAULTS["METRICS"]).get("normalize_series", False))
+    row = {"t": t}
+    total = max(1, sum(counts.values()))
+    for g in GLYPHS_CANONICAL:
+        c = counts.get(g, 0)
+        row[g] = (c / total) if norm else c
+    hist.setdefault("glifogram", []).append(row)
+
+    # Store the latency index
+    li = (n_latent / max(1, n_total)) if n_total else 0.0
+    hist.setdefault("latency_index", []).append({"t": t, "value": li})
+
+
+# -------------
+# Callback registration
+# -------------
+
+def register_metrics_callbacks(G) -> None:
+    register_callback(G, when="after_step", func=_metrics_step, name="metrics_step")
+
+
+# -------------
+# Queries / reports
+# -------------
+
+def Tg_global(G, normalize: bool = True) -> Dict[str, float]:
+    """Total glyphic time per class. If normalize=True, returns fractions of the total."""
+    hist = _ensure_history(G)
+    tg_total: Dict[str, float] = hist.get("Tg_total", {})
+    total = sum(tg_total.values()) or 1.0
+    if normalize:
+        return {g: float(tg_total.get(g, 0.0)) / total for g in GLYPHS_CANONICAL}
+    return {g: float(tg_total.get(g, 0.0)) for g in GLYPHS_CANONICAL}
+
+
+def Tg_by_node(G, n, normalize: bool = False) -> Dict[str, float | List[float]]:
+    """Per-node summary: if normalize, returns means per glyph; otherwise, the lists of runs."""
+    hist = _ensure_history(G)
+    rec = hist.get("Tg_by_node", {}).get(n, {})
+    if not normalize:
+        # convert defaultdict → list for serialization
+        return {g: list(rec.get(g, [])) for g in GLYPHS_CANONICAL}
+    out = {}
+    for g in GLYPHS_CANONICAL:
+        runs = rec.get(g, [])
+        out[g] = float(statistics.mean(runs)) if runs else 0.0
+
+    return out
+
+
+def latency_series(G) -> Dict[str, List[float]]:
+    hist = _ensure_history(G)
+    xs = hist.get("latency_index", [])
+    return {
+        "t": [float(x.get("t", i)) for i, x in enumerate(xs)],
+        "value": [float(x.get("value", 0.0)) for x in xs],
+    }
+
+
+def glifogram_series(G) -> Dict[str, List[float]]:
+    hist = _ensure_history(G)
+    xs = hist.get("glifogram", [])
+    if not xs:
+        return {"t": []}
+    out = {"t": [float(x.get("t", i)) for i, x in enumerate(xs)]}
+    for g in GLYPHS_CANONICAL:
+        out[g] = [float(x.get(g, 0.0)) for x in xs]
+    return out
+
+
+def glyph_top(G, k: int = 3) -> List[Tuple[str, float]]:
+    """Top-k glyphs by Tg_global (fraction)."""
+    tg = Tg_global(G, normalize=True)
+    return sorted(tg.items(), key=lambda kv: kv[1], reverse=True)[:max(1, int(k))]
+
+
+def glyph_dwell_stats(G, n) -> Dict[str, Dict[str, float]]:
+    """Per-node statistics: mean/median/max of runs per glyph."""
+    hist = _ensure_history(G)
+    rec = hist.get("Tg_by_node", {}).get(n, {})
+    out = {}
+    for g in GLYPHS_CANONICAL:
+        runs = list(rec.get(g, []))
+        if not runs:
+            out[g] = {"mean": 0.0, "median": 0.0, "max": 0.0, "count": 0}
+        else:
+            out[g] = {
+                "mean": float(statistics.mean(runs)),
+                "median": float(statistics.median(runs)),
+                "max": float(max(runs)),
+                "count": int(len(runs)),
+            }
+    return out
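The core of _metrics_step is a run-length accumulation over the per-step glyph: identical consecutive glyphs extend the current run, and a change credits the finished run to the previous glyph. Here is a standalone sketch of that bookkeeping with made-up data; it does not use the package itself.

# Standalone sketch of the Tg bookkeeping in _metrics_step.
from collections import defaultdict

def accumulate_runs(glyph_per_step, dt=1.0):
    totals = defaultdict(float)       # closed run time per glyph
    curr, run = None, 0.0
    for g in glyph_per_step:
        if curr is None:
            curr, run = g, dt
        elif g == curr:
            run += dt
        else:
            totals[curr] += run       # close the previous run
            curr, run = g, dt
    return dict(totals), (curr, run)  # the open run stays pending, as in metrics.py

print(accumulate_runs(["A’L", "A’L", "I’L", "I’L", "I’L", "SH’A"]))
# ({'A’L': 2.0, 'I’L': 3.0}, ('SH’A', 1.0))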
tnfr/observers.py
CHANGED
@@ -8,8 +8,8 @@ from collections import Counter
 from typing import Dict, Any
 import math
 
-from constants import ALIAS_DNFR, ALIAS_EPI, ALIAS_THETA, ALIAS_dEPI
-from helpers import _get_attr, list_mean, register_callback
+from .constants import ALIAS_DNFR, ALIAS_EPI, ALIAS_THETA, ALIAS_dEPI
+from .helpers import _get_attr, list_mean, register_callback
 
 # -------------------------
 # Standard Γ(R) observer
@@ -44,15 +44,27 @@ def coherencia_global(G) -> float:
 
 
 def sincronía_fase(G) -> float:
-    X =
-    Y =
+    X = [math.cos(_get_attr(G.nodes[n], ALIAS_THETA, 0.0)) for n in G.nodes()]
+    Y = [math.sin(_get_attr(G.nodes[n], ALIAS_THETA, 0.0)) for n in G.nodes()]
     if not X:
         return 1.0
-
-    th = math.atan2(sum(Y)/len(Y), sum(X)/len(X))
+    th = math.atan2(sum(Y) / len(Y), sum(X) / len(X))
     # approximate angular variance (0 = highly synchronized)
     import statistics as st
-    var =
+    var = (
+        st.pvariance(
+            [
+                (
+                    (_get_attr(G.nodes[n], ALIAS_THETA, 0.0) - th + math.pi)
+                    % (2 * math.pi)
+                    - math.pi
+                )
+                for n in G.nodes()
+            ]
+        )
+        if len(X) > 1
+        else 0.0
+    )
     return 1.0 / (1.0 + var)
 
 def orden_kuramoto(G) -> float:
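The completed sincronía_fase follows a standard recipe: mean phase from averaged sines and cosines, deviations wrapped to [-π, π), then 1/(1 + variance). A standalone sketch of the same computation over a plain list of phases, independent of the package:

# Standalone sketch of the phase-synchrony measure completed above.
import math
import statistics as st

def phase_sync(thetas):
    if not thetas:
        return 1.0
    X = [math.cos(t) for t in thetas]
    Y = [math.sin(t) for t in thetas]
    th = math.atan2(sum(Y) / len(Y), sum(X) / len(X))      # mean phase
    devs = [((t - th + math.pi) % (2 * math.pi)) - math.pi for t in thetas]
    var = st.pvariance(devs) if len(thetas) > 1 else 0.0   # angular spread
    return 1.0 / (1.0 + var)

print(phase_sync([0.1, 0.12, 0.09]))            # near 1: tightly synchronized
print(phase_sync([0.0, math.pi / 2, math.pi]))  # lower: phases spread out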
tnfr/ontosim.py
CHANGED
@@ -5,20 +5,21 @@ Módulo de orquestación mínima que encadena:
 ΔNFR (field) → Si → glyphs → nodal equation → clamps → U’M → observers → RE’MESH
 """
 from __future__ import annotations
-import networkx as nx
-import math
-import random
+import networkx as nx
+import math
+import random
+from collections import deque
 
-from constants import DEFAULTS, attach_defaults
-from dynamics import step as _step, run as _run
-from dynamics import default_compute_delta_nfr
+from .constants import DEFAULTS, attach_defaults
+from .dynamics import step as _step, run as _run
+from .dynamics import default_compute_delta_nfr
 
 # High-level API
 
 def preparar_red(G: nx.Graph, *, override_defaults: bool = False, **overrides) -> nx.Graph:
     attach_defaults(G, override=override_defaults)
     if overrides:
-        from constants import merge_overrides
+        from .constants import merge_overrides
         merge_overrides(G, **overrides)
     # Soft initializations
     G.graph.setdefault("history", {
@@ -43,11 +44,13 @@ def preparar_red(G: nx.Graph, *, override_defaults: bool = False, **overrides) -> nx.Graph:
         "phase_R": [],
         "phase_disr": [],
     })
-    G.graph.
+    tau = int(G.graph.get("REMESH_TAU", DEFAULTS["REMESH_TAU"]))
+    maxlen = max(2 * tau + 5, 64)
+    G.graph.setdefault("_epi_hist", deque(maxlen=maxlen))
     # Auto-attach the standard observer if requested
     if G.graph.get("ATTACH_STD_OBSERVER", False):
         try:
-            from observers import attach_standard_observer
+            from .observers import attach_standard_observer
             attach_standard_observer(G)
         except Exception as e:
             G.graph.setdefault("_callback_errors", []).append(
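preparar_red now bounds the EPI history with a deque sized from REMESH_TAU, so the snapshot taken τ steps earlier is still available when RE’MESH checks len(hist) < tau + 1 (see operators.py below). A small standalone sketch of that sizing rule; the τ value and the snapshot contents are illustrative.

# Standalone sketch of the bounded EPI history set up in preparar_red.
from collections import deque

tau = 8                                   # stand-in for G.graph["REMESH_TAU"]
maxlen = max(2 * tau + 5, 64)             # same sizing rule as preparar_red
epi_hist = deque(maxlen=maxlen)

for step in range(200):                   # fake per-step snapshots
    epi_hist.append({"step": step})       # dynamics would store per-node EPI here

print(len(epi_hist), epi_hist[-(tau + 1)]["step"])  # 64 191: bounded, tau steps back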
tnfr/operators.py
CHANGED
@@ -5,14 +5,15 @@ import math
 import random
 import hashlib
 
-from constants import DEFAULTS, ALIAS_VF, ALIAS_THETA, ALIAS_DNFR, ALIAS_EPI, ALIAS_D2EPI
-from helpers import _get_attr, _set_attr, clamp, clamp01, list_mean, fase_media, push_glifo, invoke_callbacks
+from .constants import DEFAULTS, ALIAS_VF, ALIAS_THETA, ALIAS_DNFR, ALIAS_EPI, ALIAS_D2EPI
+from .helpers import _get_attr, _set_attr, clamp, clamp01, list_mean, fase_media, push_glifo, invoke_callbacks
+from collections import deque
 
 """
 This module implements:
 - The 13 glyphs as smooth local operators.
 - A dispatcher `aplicar_glifo` that maps the glyph name (with its typographic apostrophe) to its function.
-- Network RE’MESH: `aplicar_remesh_red` and `
+- Network RE’MESH: `aplicar_remesh_red` and `aplicar_remesh_si_estabilización_global`.
 
 Note on the RE’MESH α (alpha): it is taken in priority order from
 1) G.graph["GLYPH_FACTORS"]["REMESH_alpha"]
@@ -153,6 +154,18 @@ _NAME_TO_OP = {
 
 
 def aplicar_glifo(G, n, glifo: str, *, window: Optional[int] = None) -> None:
+    """Apply a TNFR glyph to node `n` with hysteresis `window`.
+
+    The 13 glyphs implement canonical reorganizers:
+    A’L (emission), E’N (reception), I’L (coherence), O’Z (dissonance),
+    U’M (coupling), R’A (resonance), SH’A (silence), VA’L (expansion),
+    NU’L (contraction), T’HOL (self-organization), Z’HIR (mutation),
+    NA’V (transition), RE’MESH (recursivity).
+
+    Relation to the grammar: prior selection should go through
+    `enforce_canonical_grammar` (grammar.py) to respect compatibilities,
+    the O’Z→Z’HIR precondition and T’HOL[...] closures.
+    """
     glifo = str(glifo)
     op = _NAME_TO_OP.get(glifo)
     if not op:
@@ -186,7 +199,7 @@ def aplicar_remesh_red(G) -> None:
     """
     tau = int(G.graph.get("REMESH_TAU", DEFAULTS["REMESH_TAU"]))
     alpha, alpha_src = _remesh_alpha_info(G)
-    hist = G.graph.get("_epi_hist",
+    hist = G.graph.get("_epi_hist", deque())
     if len(hist) < tau + 1:
         return
 
@@ -256,7 +269,11 @@ def aplicar_remesh_red(G) -> None:
 
 def aplicar_remesh_si_estabilizacion_global(G, pasos_estables_consecutivos: Optional[int] = None) -> None:
     # Windows and thresholds
-    w_estab =
+    w_estab = (
+        pasos_estables_consecutivos
+        if pasos_estables_consecutivos is not None
+        else int(G.graph.get("REMESH_STABILITY_WINDOW", DEFAULTS["REMESH_STABILITY_WINDOW"]))
+    )
     frac_req = float(G.graph.get("FRACTION_STABLE_REMESH", DEFAULTS["FRACTION_STABLE_REMESH"]))
     req_extra = bool(G.graph.get("REMESH_REQUIRE_STABILITY", DEFAULTS["REMESH_REQUIRE_STABILITY"]))
     min_sync = float(G.graph.get("REMESH_MIN_PHASE_SYNC", DEFAULTS["REMESH_MIN_PHASE_SYNC"]))
@@ -293,4 +310,4 @@ def aplicar_remesh_si_estabilizacion_global(G, pasos_estables_consecutivos: Optional[int] = None) -> None:
         return
     # 4) Apply and record
     aplicar_remesh_red(G)
-    G.graph["_last_remesh_step"] = step_idx
+    G.graph["_last_remesh_step"] = step_idx
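Putting the pieces together, a glyph chosen by any selector is meant to pass through the grammar before aplicar_glifo, as select_and_apply_with_grammar does in grammar.py. The sketch below is a hedged usage example assuming tnfr 4.0.0 is installed and G was prepared with preparar_red; my_selector is hypothetical and not part of the package.

# Routing a hand-written selector through the canonical grammar before
# applying the glyph, mirroring select_and_apply_with_grammar.
from tnfr.grammar import enforce_canonical_grammar, on_applied_glifo
from tnfr.operators import aplicar_glifo

def my_selector(G, n):                              # always proposes I’L (illustrative)
    return "I’L"

def apply_with_grammar(G, n, window: int = 5) -> None:
    cand = my_selector(G, n)
    cand = enforce_canonical_grammar(G, n, cand)    # may substitute a fallback glyph
    aplicar_glifo(G, n, cand, window=window)
    on_applied_glifo(G, n, cand)                    # updates the T’HOL open/close state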