tnfr 4.1.0__py3-none-any.whl → 4.5.0__py3-none-any.whl

This diff shows the content of publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only.

tnfr/operators.py CHANGED
@@ -1,12 +1,24 @@
  # operators.py — canonical TNFR (ASCII-safe)
  from __future__ import annotations
- from typing import Dict, Any, Optional
+ from typing import Dict, Any, Optional, Iterable
  import math
  import random
  import hashlib
-
- from .constants import DEFAULTS, ALIAS_VF, ALIAS_THETA, ALIAS_DNFR, ALIAS_EPI, ALIAS_D2EPI
- from .helpers import _get_attr, _set_attr, clamp, clamp01, list_mean, fase_media, push_glifo, invoke_callbacks
+ import networkx as nx
+ from networkx.algorithms import community as nx_comm
+
+ from .constants import DEFAULTS, ALIAS_EPI
+ from .helpers import (
+     clamp,
+     clamp01,
+     list_mean,
+     invoke_callbacks,
+     _get_attr,
+     _set_attr,
+     _get_attr_str,
+     _set_attr_str,
+ )
+ from .node import NodoProtocol, NodoNX
  from collections import deque

  """
@@ -34,121 +46,156 @@ def _node_offset(G, n) -> int:
  # Glyphs (local operators)
  # -------------------------

- def op_AL(G, n):  # A’L — Emission
-     f = float(G.graph.get("GLYPH_FACTORS", DEFAULTS["GLYPH_FACTORS"]).get("AL_boost", 0.05))
-     nd = G.nodes[n]
-     epi = _get_attr(nd, ALIAS_EPI, 0.0)
-     _set_attr(nd, ALIAS_EPI, epi + f)
-
-
- def op_EN(G, n):  # E’N — Reception
-     mix = float(G.graph.get("GLYPH_FACTORS", DEFAULTS["GLYPH_FACTORS"]).get("EN_mix", 0.25))
-     nd = G.nodes[n]
-     epi = _get_attr(nd, ALIAS_EPI, 0.0)
-     if G.degree(n) == 0:
-         return  # no neighbors, nothing to mix
-     epi_bar = list_mean(_get_attr(G.nodes[v], ALIAS_EPI, epi) for v in G.neighbors(n))
-     _set_attr(nd, ALIAS_EPI, (1 - mix) * epi + mix * epi_bar)
-
-
- def op_IL(G, n):  # I’L — Coherence (reduces ΔNFR)
-     factor = float(G.graph.get("GLYPH_FACTORS", DEFAULTS["GLYPH_FACTORS"]).get("IL_dnfr_factor", 0.7))
-     nd = G.nodes[n]
-     dnfr = _get_attr(nd, ALIAS_DNFR, 0.0)
-     _set_attr(nd, ALIAS_DNFR, factor * dnfr)
-
- def op_OZ(G, n):  # O’Z — Dissonance (raises ΔNFR or adds noise)
-     factor = float(G.graph.get("GLYPH_FACTORS", DEFAULTS["GLYPH_FACTORS"]).get("OZ_dnfr_factor", 1.3))
-     nd = G.nodes[n]
-     dnfr = _get_attr(nd, ALIAS_DNFR, 0.0)
-     if bool(G.graph.get("OZ_NOISE_MODE", False)):
-         base_seed = int(G.graph.get("RANDOM_SEED", 0))
-         step_idx = len(G.graph.get("history", {}).get("C_steps", []))
-         rnd = random.Random(base_seed + step_idx*1000003 + _node_offset(G, n) % 1009)
-         sigma = float(G.graph.get("OZ_SIGMA", 0.1))
+ def _fase_media_node(node: NodoProtocol) -> float:
+     x = y = 0.0
+     count = 0
+     for v in node.neighbors():
+         th = getattr(v, "theta", 0.0)
+         x += math.cos(th)
+         y += math.sin(th)
+         count += 1
+     if count == 0:
+         return getattr(node, "theta", 0.0)
+     return math.atan2(y / count, x / count)
+
+
+ def _op_AL(node: NodoProtocol) -> None:  # A’L — Emission
+     f = float(node.graph.get("GLYPH_FACTORS", DEFAULTS["GLYPH_FACTORS"]).get("AL_boost", 0.05))
+     node.EPI = node.EPI + f
+     node.epi_kind = "A’L"
+
+
+ def _op_EN(node: NodoProtocol) -> None:  # E’N — Reception
+     mix = float(node.graph.get("GLYPH_FACTORS", DEFAULTS["GLYPH_FACTORS"]).get("EN_mix", 0.25))
+     epi = node.EPI
+     neigh = list(node.neighbors())
+     if not neigh:
+         return
+     epi_bar = list_mean(v.EPI for v in neigh) if neigh else epi
+     node.EPI = (1 - mix) * epi + mix * epi_bar
+
+     candidatos = [(abs(node.EPI), node.epi_kind)]
+     for v in neigh:
+         candidatos.append((abs(v.EPI), v.epi_kind))
+     node.epi_kind = max(candidatos, key=lambda x: x[0])[1] or "E’N"
+
+
+ def _op_IL(node: NodoProtocol) -> None:  # I’L — Coherence
+     factor = float(node.graph.get("GLYPH_FACTORS", DEFAULTS["GLYPH_FACTORS"]).get("IL_dnfr_factor", 0.7))
+     node.dnfr = factor * getattr(node, "dnfr", 0.0)
+
+
+ def _op_OZ(node: NodoProtocol) -> None:  # O’Z — Dissonance
+     factor = float(node.graph.get("GLYPH_FACTORS", DEFAULTS["GLYPH_FACTORS"]).get("OZ_dnfr_factor", 1.3))
+     dnfr = getattr(node, "dnfr", 0.0)
+     if bool(node.graph.get("OZ_NOISE_MODE", False)):
+         base_seed = int(node.graph.get("RANDOM_SEED", 0))
+         step_idx = len(node.graph.get("history", {}).get("C_steps", []))
+         rnd = random.Random(base_seed + step_idx * 1000003 + node.offset() % 1009)
+         sigma = float(node.graph.get("OZ_SIGMA", 0.1))
          noise = sigma * (2.0 * rnd.random() - 1.0)
-         _set_attr(nd, ALIAS_DNFR, dnfr + noise)
+         node.dnfr = dnfr + noise
      else:
-         _set_attr(nd, ALIAS_DNFR, factor * dnfr if abs(dnfr) > 1e-9 else 0.1)
+         node.dnfr = factor * dnfr if abs(dnfr) > 1e-9 else 0.1
+

- def op_UM(G, n):  # U’M — Coupling (pushes phase toward the local mean)
-     k = float(G.graph.get("GLYPH_FACTORS", DEFAULTS["GLYPH_FACTORS"]).get("UM_theta_push", 0.25))
-     nd = G.nodes[n]
-     th = _get_attr(nd, ALIAS_THETA, 0.0)
-     thL = fase_media(G, n)
+ def _op_UM(node: NodoProtocol) -> None:  # U’M — Coupling
+     k = float(node.graph.get("GLYPH_FACTORS", DEFAULTS["GLYPH_FACTORS"]).get("UM_theta_push", 0.25))
+     th = node.theta
+     thL = _fase_media_node(node)
      d = ((thL - th + math.pi) % (2 * math.pi) - math.pi)
-     _set_attr(nd, ALIAS_THETA, th + k * d)
+     node.theta = th + k * d
+
+     if bool(node.graph.get("UM_FUNCTIONAL_LINKS", False)):
+         thr = float(node.graph.get("UM_COMPAT_THRESHOLD", DEFAULTS.get("UM_COMPAT_THRESHOLD", 0.75)))
+         epi_i = node.EPI
+         si_i = node.Si
+         for j in node.all_nodes():
+             if j is node or node.has_edge(j):
+                 continue
+             th_j = j.theta
+             dphi = abs(((th_j - th + math.pi) % (2 * math.pi)) - math.pi) / math.pi
+             epi_j = j.EPI
+             si_j = j.Si
+             epi_sim = 1.0 - abs(epi_i - epi_j) / (abs(epi_i) + abs(epi_j) + 1e-9)
+             si_sim = 1.0 - abs(si_i - si_j)
+             compat = (1 - dphi) * 0.5 + 0.25 * epi_sim + 0.25 * si_sim
+             if compat >= thr:
+                 node.add_edge(j, compat)
+
+
+ def _op_RA(node: NodoProtocol) -> None:  # R’A — Resonance
+     diff = float(node.graph.get("GLYPH_FACTORS", DEFAULTS["GLYPH_FACTORS"]).get("RA_epi_diff", 0.15))
+     epi = node.EPI
+     neigh = list(node.neighbors())
+     if not neigh:
+         return
+     epi_bar = list_mean(v.EPI for v in neigh)
+     node.EPI = epi + diff * (epi_bar - epi)

+     candidatos = [(abs(node.EPI), node.epi_kind)]
+     for v in neigh:
+         candidatos.append((abs(v.EPI), v.epi_kind))
+     node.epi_kind = max(candidatos, key=lambda x: x[0])[1] or "R’A"

- def op_RA(G, n):  # R’A — Resonance (EPI diffusion)
-     diff = float(G.graph.get("GLYPH_FACTORS", DEFAULTS["GLYPH_FACTORS"]).get("RA_epi_diff", 0.15))
-     nd = G.nodes[n]
-     epi = _get_attr(nd, ALIAS_EPI, 0.0)
-     if G.degree(n) == 0:
-         return
-     epi_bar = list_mean(_get_attr(G.nodes[v], ALIAS_EPI, epi) for v in G.neighbors(n))
-     _set_attr(nd, ALIAS_EPI, epi + diff * (epi_bar - epi))
-
-
- def op_SHA(G, n):  # SH’A — Silence (lowers νf)
-     factor = float(G.graph.get("GLYPH_FACTORS", DEFAULTS["GLYPH_FACTORS"]).get("SHA_vf_factor", 0.85))
-     nd = G.nodes[n]
-     vf = _get_attr(nd, ALIAS_VF, 0.0)
-     _set_attr(nd, ALIAS_VF, factor * vf)
-
-
- def op_VAL(G, n):  # VA’L — Expansion (scales EPI)
-     s = float(G.graph.get("GLYPH_FACTORS", DEFAULTS["GLYPH_FACTORS"]).get("VAL_scale", 1.15))
-     nd = G.nodes[n]
-     epi = _get_attr(nd, ALIAS_EPI, 0.0)
-     _set_attr(nd, ALIAS_EPI, s * epi)
-
-
- def op_NUL(G, n):  # NU’L — Contraction (scales EPI)
-     s = float(G.graph.get("GLYPH_FACTORS", DEFAULTS["GLYPH_FACTORS"]).get("NUL_scale", 0.85))
-     nd = G.nodes[n]
-     epi = _get_attr(nd, ALIAS_EPI, 0.0)
-     _set_attr(nd, ALIAS_EPI, s * epi)
-
-
- def op_THOL(G, n):  # T’HOL — Self-organization (injects acceleration)
-     a = float(G.graph.get("GLYPH_FACTORS", DEFAULTS["GLYPH_FACTORS"]).get("THOL_accel", 0.10))
-     nd = G.nodes[n]
-     d2 = _get_attr(nd, ALIAS_D2EPI, 0.0)
-     _set_attr(nd, ALIAS_DNFR, _get_attr(nd, ALIAS_DNFR, 0.0) + a * d2)
-
-
- def op_ZHIR(G, n):  # Z’HIR — Mutation (shifts phase)
-     shift = float(G.graph.get("GLYPH_FACTORS", DEFAULTS["GLYPH_FACTORS"]).get("ZHIR_theta_shift", 1.57079632679))
-     nd = G.nodes[n]
-     th = _get_attr(nd, ALIAS_THETA, 0.0)
-     _set_attr(nd, ALIAS_THETA, th + shift)
-
- def op_NAV(G, n):  # NA’V Transition (soft ΔNFR jitter)
-     nd = G.nodes[n]
-     dnfr = _get_attr(nd, ALIAS_DNFR, 0.0)
-     j = float(G.graph.get("GLYPH_FACTORS", DEFAULTS["GLYPH_FACTORS"]).get("NAV_jitter", 0.05))
-     if bool(G.graph.get("NAV_RANDOM", True)):
-         # uniform jitter in [-j, j] with a reproducible seed
-         base_seed = int(G.graph.get("RANDOM_SEED", 0))
-         # optional: small offset to avoid the same sequence across nodes/steps
-         step_idx = len(G.graph.get("history", {}).get("C_steps", []))
-         rnd = random.Random(base_seed + step_idx*1000003 + _node_offset(G, n) % 1009)
+
+ def _op_SHA(node: NodoProtocol) -> None:  # SH’A — Silence
+     factor = float(node.graph.get("GLYPH_FACTORS", DEFAULTS["GLYPH_FACTORS"]).get("SHA_vf_factor", 0.85))
+     node.vf = factor * node.vf
+
+
+ def _op_VAL(node: NodoProtocol) -> None:  # VA’L — Expansion
+     s = float(node.graph.get("GLYPH_FACTORS", DEFAULTS["GLYPH_FACTORS"]).get("VAL_scale", 1.15))
+     node.EPI = s * node.EPI
+     node.epi_kind = "VA’L"
+
+
+ def _op_NUL(node: NodoProtocol) -> None:  # NU’L — Contraction
+     s = float(node.graph.get("GLYPH_FACTORS", DEFAULTS["GLYPH_FACTORS"]).get("NUL_scale", 0.85))
+     node.EPI = s * node.EPI
+     node.epi_kind = "NU’L"
+
+
+ def _op_THOL(node: NodoProtocol) -> None:  # T’HOL — Self-organization
+     a = float(node.graph.get("GLYPH_FACTORS", DEFAULTS["GLYPH_FACTORS"]).get("THOL_accel", 0.10))
+     node.dnfr = node.dnfr + a * getattr(node, "d2EPI", 0.0)
+
+
+ def _op_ZHIR(node: NodoProtocol) -> None:  # Z’HIR — Mutation
+     shift = float(node.graph.get("GLYPH_FACTORS", DEFAULTS["GLYPH_FACTORS"]).get("ZHIR_theta_shift", 1.57079632679))
+     node.theta = node.theta + shift
+
+
+ def _op_NAV(node: NodoProtocol) -> None:  # NA’V — Transition
+     dnfr = node.dnfr
+     vf = node.vf
+     gf = node.graph.get("GLYPH_FACTORS", DEFAULTS["GLYPH_FACTORS"])
+     eta = float(gf.get("NAV_eta", 0.5))
+     strict = bool(node.graph.get("NAV_STRICT", False))
+     if strict:
+         base = vf
+     else:
+         sign = 1.0 if dnfr >= 0 else -1.0
+         target = sign * vf
+         base = (1.0 - eta) * dnfr + eta * target
+     j = float(gf.get("NAV_jitter", 0.05))
+     if bool(node.graph.get("NAV_RANDOM", True)):
+         base_seed = int(node.graph.get("RANDOM_SEED", 0))
+         step_idx = len(node.graph.get("history", {}).get("C_steps", []))
+         rnd = random.Random(base_seed + step_idx * 1000003 + node.offset() % 1009)
          jitter = j * (2.0 * rnd.random() - 1.0)
      else:
-         # deterministic behavior (backwards compatible)
-         jitter = j * (1 if dnfr >= 0 else -1)
-     _set_attr(nd, ALIAS_DNFR, dnfr + jitter)
+         jitter = j * (1 if base >= 0 else -1)
+     node.dnfr = base + jitter

- def op_REMESH(G, n):  # RE’MESH — runs at network scale (local no-op with a warning)
-     # Log only once per step to avoid spamming
-     step_idx = len(G.graph.get("history", {}).get("C_steps", []))
-     last_warn = G.graph.get("_remesh_warn_step", None)
+
+ def _op_REMESH(node: NodoProtocol) -> None:  # RE’MESH warning
+     step_idx = len(node.graph.get("history", {}).get("C_steps", []))
+     last_warn = node.graph.get("_remesh_warn_step", None)
      if last_warn != step_idx:
          msg = "RE’MESH es a escala de red. Usa aplicar_remesh_si_estabilizacion_global(G) o aplicar_remesh_red(G)."
-         G.graph.setdefault("history", {}).setdefault("events", []).append(("warn", {"step": step_idx, "node": n, "msg": msg}))
-         G.graph["_remesh_warn_step"] = step_idx
-     # no local state change
+         node.graph.setdefault("history", {}).setdefault("events", []).append(("warn", {"step": step_idx, "node": None, "msg": msg}))
+         node.graph["_remesh_warn_step"] = step_idx
      return

  # -------------------------
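The new U’M functional links score every non-neighbor by phase, EPI and Si similarity and add an edge whenever the score clears UM_COMPAT_THRESHOLD (0.75 by default). A minimal standalone recomputation of that score, with illustrative input values (only the formula is taken from the code above):

    import math

    # Hypothetical values for two nodes; not taken from the package.
    th_i, th_j = 0.10, 0.35      # phases
    epi_i, epi_j = 0.25, 0.22    # EPI values
    si_i, si_j = 0.60, 0.55      # Si values

    dphi = abs(((th_j - th_i + math.pi) % (2 * math.pi)) - math.pi) / math.pi
    epi_sim = 1.0 - abs(epi_i - epi_j) / (abs(epi_i) + abs(epi_j) + 1e-9)
    si_sim = 1.0 - abs(si_i - si_j)
    compat = (1 - dphi) * 0.5 + 0.25 * epi_sim + 0.25 * si_sim
    print(round(compat, 3))  # about 0.93, above the 0.75 default threshold

With these numbers the pair would be linked; phase agreement carries half the weight, EPI and Si similarity a quarter each.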
@@ -156,33 +203,50 @@ def op_REMESH(G, n): # RE’MESH — se realiza a escala de red (no-op local co
  # -------------------------

  _NAME_TO_OP = {
-     "A’L": op_AL, "E’N": op_EN, "I’L": op_IL, "O’Z": op_OZ, "U’M": op_UM,
-     "R’A": op_RA, "SH’A": op_SHA, "VA’L": op_VAL, "NU’L": op_NUL,
-     "T’HOL": op_THOL, "Z’HIR": op_ZHIR, "NA’V": op_NAV, "RE’MESH": op_REMESH,
+     "A’L": _op_AL, "E’N": _op_EN, "I’L": _op_IL, "O’Z": _op_OZ, "U’M": _op_UM,
+     "R’A": _op_RA, "SH’A": _op_SHA, "VA’L": _op_VAL, "NU’L": _op_NUL,
+     "T’HOL": _op_THOL, "Z’HIR": _op_ZHIR, "NA’V": _op_NAV, "RE’MESH": _op_REMESH,
  }


- def aplicar_glifo(G, n, glifo: str, *, window: Optional[int] = None) -> None:
-     """Applies a TNFR glyph to node `n` with hysteresis `window`.
+ def _wrap(fn):
+     def inner(obj, n=None):
+         node = obj if n is None else NodoNX(obj, n)
+         return fn(node)
+     return inner

-     The 13 glyphs implement canonical reorganizers:
-     A’L (emission), E’N (reception), I’L (coherence), O’Z (dissonance),
-     U’M (coupling), R’A (resonance), SH’A (silence), VA’L (expansion),
-     NU’L (contraction), T’HOL (self-organization), Z’HIR (mutation),
-     NA’V (transition), RE’MESH (recursivity).
+ op_AL = _wrap(_op_AL)
+ op_EN = _wrap(_op_EN)
+ op_IL = _wrap(_op_IL)
+ op_OZ = _wrap(_op_OZ)
+ op_UM = _wrap(_op_UM)
+ op_RA = _wrap(_op_RA)
+ op_SHA = _wrap(_op_SHA)
+ op_VAL = _wrap(_op_VAL)
+ op_NUL = _wrap(_op_NUL)
+ op_THOL = _wrap(_op_THOL)
+ op_ZHIR = _wrap(_op_ZHIR)
+ op_NAV = _wrap(_op_NAV)
+ op_REMESH = _wrap(_op_REMESH)
+
+
+ def aplicar_glifo_obj(node: NodoProtocol, glifo: str, *, window: Optional[int] = None) -> None:
+     """Applies ``glifo`` to an object implementing :class:`NodoProtocol`."""

-     Relation to the grammar: prior selection must go through
-     `enforce_canonical_grammar` (grammar.py) to respect compatibilities,
-     the O’Z→Z’HIR precondition and T’HOL[...] closures.
-     """
      glifo = str(glifo)
      op = _NAME_TO_OP.get(glifo)
      if not op:
          return
      if window is None:
-         window = int(G.graph.get("GLYPH_HYSTERESIS_WINDOW", DEFAULTS["GLYPH_HYSTERESIS_WINDOW"]))
-     push_glifo(G.nodes[n], glifo, window)
-     op(G, n)
+         window = int(node.graph.get("GLYPH_HYSTERESIS_WINDOW", DEFAULTS["GLYPH_HYSTERESIS_WINDOW"]))
+     node.push_glifo(glifo, window)
+     op(node)
+
+
+ def aplicar_glifo(G, n, glifo: str, *, window: Optional[int] = None) -> None:
+     """Adapter to operate on ``networkx`` graphs."""
+     node = NodoNX(G, n)
+     aplicar_glifo_obj(node, glifo, window=window)


  # -------------------------
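aplicar_glifo is now a thin adapter: it wraps the networkx node in NodoNX and delegates to aplicar_glifo_obj, which records the glyph and runs the matching _op_* function. A minimal usage sketch, assuming NodoNX (defined in tnfr/node.py, not shown in this diff) maps the EPI/νf/θ/Si properties onto node attributes with those names, as tnfr/scenarios.py initializes them:

    import networkx as nx
    from tnfr.operators import aplicar_glifo

    G = nx.cycle_graph(4)
    for i in G.nodes():
        # Attribute names follow the initialization in tnfr/scenarios.py below.
        G.nodes[i].update({"EPI": 0.2, "νf": 1.0, "θ": 0.0, "Si": 0.5})

    aplicar_glifo(G, 0, "A’L")   # wraps node 0 in NodoNX and applies _op_AL
    print(G.nodes[0]["EPI"])     # expected near 0.25 with the default AL_boost of 0.05

Passing an object that already satisfies NodoProtocol to aplicar_glifo_obj skips the wrapper entirely.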
@@ -190,31 +254,31 @@ def aplicar_glifo(G, n, glifo: str, *, window: Optional[int] = None) -> None:
  # -------------------------

  def _remesh_alpha_info(G):
-     """Returns (alpha, source) with explicit precedence."""
-     hard = bool(G.graph.get("REMESH_ALPHA_HARD", DEFAULTS.get("REMESH_ALPHA_HARD", False)))
-     gf = G.graph.get("GLYPH_FACTORS", DEFAULTS["GLYPH_FACTORS"])
-     if not hard and "REMESH_alpha" in gf:
-         return float(gf["REMESH_alpha"]), "GLYPH_FACTORS"
-     if "REMESH_ALPHA" in G.graph:
-         return float(G.graph["REMESH_ALPHA"]), "G.graph"
+     """Returns `(alpha, source)` with explicit precedence."""
+     if bool(G.graph.get("REMESH_ALPHA_HARD", DEFAULTS.get("REMESH_ALPHA_HARD", False))):
+         val = float(G.graph.get("REMESH_ALPHA", DEFAULTS["REMESH_ALPHA"]))
+         return val, "REMESH_ALPHA"
+     gf = G.graph.get("GLYPH_FACTORS", DEFAULTS.get("GLYPH_FACTORS", {}))
      if "REMESH_alpha" in gf:
-         return float(gf["REMESH_alpha"]), "GLYPH_FACTORS"
-     return float(DEFAULTS["REMESH_ALPHA"]), "DEFAULTS"
+         return float(gf["REMESH_alpha"]), "GLYPH_FACTORS.REMESH_alpha"
+     if "REMESH_ALPHA" in G.graph:
+         return float(G.graph["REMESH_ALPHA"]), "REMESH_ALPHA"
+     return float(DEFAULTS["REMESH_ALPHA"]), "DEFAULTS.REMESH_ALPHA"


  def aplicar_remesh_red(G) -> None:
-     """
-     Network-scale RE’MESH using _epi_hist captured in dynamics.step.
-     Logs meta with alpha/tau/step + topo_hash and EPI checksums/means before/after.
-     Alpha precedence: GLYPH_FACTORS → G.graph → DEFAULTS.
-     """
-     tau = int(G.graph.get("REMESH_TAU", DEFAULTS["REMESH_TAU"]))
+     """Network-scale RE’MESH using _epi_hist with multi-scale memory."""
+     tau_g = int(G.graph.get("REMESH_TAU_GLOBAL", G.graph.get("REMESH_TAU", DEFAULTS["REMESH_TAU_GLOBAL"])))
+     tau_l = int(G.graph.get("REMESH_TAU_LOCAL", G.graph.get("REMESH_TAU", DEFAULTS["REMESH_TAU_LOCAL"])))
+     tau_req = max(tau_g, tau_l)
      alpha, alpha_src = _remesh_alpha_info(G)
+     G.graph["_REMESH_ALPHA_SRC"] = alpha_src
      hist = G.graph.get("_epi_hist", deque())
-     if len(hist) < tau + 1:
+     if len(hist) < tau_req + 1:
          return

-     past = hist[-(tau + 1)]
+     past_g = hist[-(tau_g + 1)]
+     past_l = hist[-(tau_l + 1)]

      # --- Topology + EPI snapshot (BEFORE) ---
      try:
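_remesh_alpha_info now prefers GLYPH_FACTORS["REMESH_alpha"] unless REMESH_ALPHA_HARD forces the graph-level REMESH_ALPHA. A sketch of that precedence, assuming the private helper is imported directly (the numbers are illustrative):

    import networkx as nx
    from tnfr.operators import _remesh_alpha_info  # private helper shown above

    G = nx.Graph()
    G.graph["GLYPH_FACTORS"] = {"REMESH_alpha": 0.3}
    G.graph["REMESH_ALPHA"] = 0.5

    print(_remesh_alpha_info(G))        # (0.3, "GLYPH_FACTORS.REMESH_alpha")
    G.graph["REMESH_ALPHA_HARD"] = True
    print(_remesh_alpha_info(G))        # (0.5, "REMESH_ALPHA")

The chosen source is also recorded on the graph as _REMESH_ALPHA_SRC by aplicar_remesh_red.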
@@ -239,8 +303,11 @@ def aplicar_remesh_red(G) -> None:
      for n in G.nodes():
          nd = G.nodes[n]
          epi_now = _get_attr(nd, ALIAS_EPI, 0.0)
-         epi_old = float(past.get(n, epi_now))
-         _set_attr(nd, ALIAS_EPI, (1 - alpha) * epi_now + alpha * epi_old)
+         epi_old_l = float(past_l.get(n, epi_now))
+         epi_old_g = float(past_g.get(n, epi_now))
+         mixed = (1 - alpha) * epi_now + alpha * epi_old_l
+         mixed = (1 - alpha) * mixed + alpha * epi_old_g
+         _set_attr(nd, ALIAS_EPI, mixed)

      # --- EPI snapshot (AFTER) ---
      epi_mean_after = list_mean(_get_attr(G.nodes[n], ALIAS_EPI, 0.0) for n in G.nodes())
@@ -253,7 +320,8 @@ def aplicar_remesh_red(G) -> None:
      meta = {
          "alpha": alpha,
          "alpha_source": alpha_src,
-         "tau": tau,
+         "tau_global": tau_g,
+         "tau_local": tau_l,
          "step": step_idx,
          # signatures
          "topo_hash": topo_hash,
@@ -278,6 +346,115 @@ def aplicar_remesh_red(G) -> None:
      # Callbacks Γ(R)
      invoke_callbacks(G, "on_remesh", dict(meta))

+
+ def aplicar_remesh_red_topologico(
+     G,
+     mode: Optional[str] = None,
+     *,
+     k: Optional[int] = None,
+     p_rewire: float = 0.2,
+     seed: Optional[int] = None,
+ ) -> None:
+     """Approximate topological remeshing.
+
+     - mode="knn": connects each node to its ``k`` most EPI-similar neighbors
+       with probability ``p_rewire``.
+     - mode="mst": keeps only a minimum spanning tree by EPI distance.
+     - mode="community": groups nodes into modularity communities and connects
+       them by inter-community similarity.
+
+     Connectivity is always preserved by adding a base MST.
+     """
+     nodes = list(G.nodes())
+     n_before = len(nodes)
+     if n_before <= 1:
+         return
+     rnd = random.Random(seed)
+
+     if mode is None:
+         mode = str(G.graph.get("REMESH_MODE", DEFAULTS.get("REMESH_MODE", "knn")))
+     mode = str(mode)
+
+     # EPI-based similarity (absolute distance)
+     epi = {n: _get_attr(G.nodes[n], ALIAS_EPI, 0.0) for n in nodes}
+     H = nx.Graph()
+     H.add_nodes_from(nodes)
+     for i, u in enumerate(nodes):
+         for v in nodes[i + 1 :]:
+             w = abs(epi[u] - epi[v])
+             H.add_edge(u, v, weight=w)
+     mst = nx.minimum_spanning_tree(H, weight="weight")
+
+     if mode == "community":
+         # Detect communities and rebuild the network with meta-nodes
+         comms = list(nx_comm.greedy_modularity_communities(G))
+         if len(comms) <= 1:
+             new_edges = set(mst.edges())
+         else:
+             k_val = (
+                 int(k)
+                 if k is not None
+                 else int(G.graph.get("REMESH_COMMUNITY_K", DEFAULTS.get("REMESH_COMMUNITY_K", 2)))
+             )
+             # Community graph based on EPI means
+             C = nx.Graph()
+             for idx, comm in enumerate(comms):
+                 members = list(comm)
+                 epi_mean = list_mean(epi[n] for n in members)
+                 C.add_node(idx)
+                 _set_attr(C.nodes[idx], ALIAS_EPI, epi_mean)
+                 C.nodes[idx]["members"] = members
+             for i in C.nodes():
+                 for j in C.nodes():
+                     if i < j:
+                         w = abs(
+                             _get_attr(C.nodes[i], ALIAS_EPI, 0.0)
+                             - _get_attr(C.nodes[j], ALIAS_EPI, 0.0)
+                         )
+                         C.add_edge(i, j, weight=w)
+             mst_c = nx.minimum_spanning_tree(C, weight="weight")
+             new_edges = set(mst_c.edges())
+             for u in C.nodes():
+                 epi_u = _get_attr(C.nodes[u], ALIAS_EPI, 0.0)
+                 others = [v for v in C.nodes() if v != u]
+                 others.sort(key=lambda v: abs(epi_u - _get_attr(C.nodes[v], ALIAS_EPI, 0.0)))
+                 for v in others[:k_val]:
+                     if rnd.random() < p_rewire:
+                         new_edges.add(tuple(sorted((u, v))))
+
+         # Replace the original graph's nodes and edges with communities
+         G.remove_edges_from(list(G.edges()))
+         G.remove_nodes_from(list(G.nodes()))
+         for idx in C.nodes():
+             data = dict(C.nodes[idx])
+             G.add_node(idx, **data)
+         G.add_edges_from(new_edges)
+
+         if G.graph.get("REMESH_LOG_EVENTS", DEFAULTS["REMESH_LOG_EVENTS"]):
+             ev = G.graph.setdefault("history", {}).setdefault("remesh_events", [])
+             mapping = {idx: C.nodes[idx].get("members", []) for idx in C.nodes()}
+             ev.append({
+                 "mode": "community",
+                 "n_before": n_before,
+                 "n_after": G.number_of_nodes(),
+                 "mapping": mapping,
+             })
+         return
+
+     # Default / knn / mst modes operate on the original nodes
+     new_edges = set(mst.edges())
+     if mode == "knn":
+         k_val = int(k) if k is not None else int(G.graph.get("REMESH_COMMUNITY_K", DEFAULTS.get("REMESH_COMMUNITY_K", 2)))
+         k_val = max(1, k_val)
+         for u in nodes:
+             sims = sorted(nodes, key=lambda v: abs(epi[u] - epi[v]))
+             for v in sims[1 : k_val + 1]:
+                 if rnd.random() < p_rewire:
+                     new_edges.add(tuple(sorted((u, v))))
+
+     G.remove_edges_from(list(G.edges()))
+     G.add_edges_from(new_edges)
+
  def aplicar_remesh_si_estabilizacion_global(G, pasos_estables_consecutivos: Optional[int] = None) -> None:
      # Windows and thresholds
      w_estab = (
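aplicar_remesh_red_topologico always builds a minimum spanning tree over pairwise |ΔEPI| and, in "knn" mode, adds each node's k most EPI-similar partners with probability p_rewire. A small sketch, assuming the plain "EPI" node attribute is resolved by ALIAS_EPI (as in tnfr/scenarios.py):

    import networkx as nx
    from tnfr.operators import aplicar_remesh_red_topologico

    G = nx.cycle_graph(8)
    for i in G.nodes():
        G.nodes[i]["EPI"] = 0.1 + 0.05 * i   # illustrative EPI gradient

    aplicar_remesh_red_topologico(G, mode="knn", k=2, p_rewire=1.0, seed=1)
    print(G.number_of_edges())   # MST backbone plus the k-nearest EPI links

With p_rewire below 1.0 only a random subset of the candidate links is kept, while the MST backbone guarantees the rewired graph stays connected.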
@@ -289,6 +466,9 @@ def aplicar_remesh_si_estabilizacion_global(G, pasos_estables_consecutivos: Opti
      req_extra = bool(G.graph.get("REMESH_REQUIRE_STABILITY", DEFAULTS["REMESH_REQUIRE_STABILITY"]))
      min_sync = float(G.graph.get("REMESH_MIN_PHASE_SYNC", DEFAULTS["REMESH_MIN_PHASE_SYNC"]))
      max_disr = float(G.graph.get("REMESH_MAX_GLYPH_DISR", DEFAULTS["REMESH_MAX_GLYPH_DISR"]))
+     min_sigma = float(G.graph.get("REMESH_MIN_SIGMA_MAG", DEFAULTS["REMESH_MIN_SIGMA_MAG"]))
+     min_R = float(G.graph.get("REMESH_MIN_KURAMOTO_R", DEFAULTS["REMESH_MIN_KURAMOTO_R"]))
+     min_sihi = float(G.graph.get("REMESH_MIN_SI_HI_FRAC", DEFAULTS["REMESH_MIN_SI_HI_FRAC"]))

      hist = G.graph.setdefault("history", {"stable_frac": []})
      sf = hist.get("stable_frac", [])
@@ -311,7 +491,22 @@ def aplicar_remesh_si_estabilizacion_global(G, pasos_estables_consecutivos: Opti
      if "glyph_load_disr" in hist and len(hist["glyph_load_disr"]) >= w_estab:
          win_disr = hist["glyph_load_disr"][-w_estab:]
          disr_ok = (sum(win_disr)/len(win_disr)) <= max_disr
-     if not (ps_ok and disr_ok):
+     # sigma magnitude (higher is better)
+     sig_ok = True
+     if "sense_sigma_mag" in hist and len(hist["sense_sigma_mag"]) >= w_estab:
+         win_sig = hist["sense_sigma_mag"][-w_estab:]
+         sig_ok = (sum(win_sig)/len(win_sig)) >= min_sigma
+     # Kuramoto order R (higher is better)
+     R_ok = True
+     if "kuramoto_R" in hist and len(hist["kuramoto_R"]) >= w_estab:
+         win_R = hist["kuramoto_R"][-w_estab:]
+         R_ok = (sum(win_R)/len(win_R)) >= min_R
+     # fraction of nodes with high Si (higher is better)
+     sihi_ok = True
+     if "Si_hi_frac" in hist and len(hist["Si_hi_frac"]) >= w_estab:
+         win_sihi = hist["Si_hi_frac"][-w_estab:]
+         sihi_ok = (sum(win_sihi)/len(win_sihi)) >= min_sihi
+     if not (ps_ok and disr_ok and sig_ok and R_ok and sihi_ok):
          return
      # 3) Cooldown
      last = G.graph.get("_last_remesh_step", -10**9)
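The stabilization gate now also averages sense_sigma_mag, kuramoto_R and Si_hi_frac over the stability window before allowing a remesh. The graph-level thresholds can be tuned per run; the values below are illustrative, and the shipped defaults live in tnfr/constants.py:

    # G is the simulation graph (networkx); numbers are illustrative only.
    G.graph.update({
        "REMESH_MIN_SIGMA_MAG": 0.5,    # required mean sigma magnitude over the window
        "REMESH_MIN_KURAMOTO_R": 0.8,   # required mean Kuramoto order parameter R
        "REMESH_MIN_SI_HI_FRAC": 0.6,   # required mean fraction of nodes with high Si
    })

A metric whose history series is shorter than the window simply does not veto the remesh.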
tnfr/presets.py CHANGED
@@ -16,6 +16,9 @@ _PRESETS = {
          "SH’A",
      ),
      "ejemplo_canonico": ejemplo_canonico_basico(),
+     # Fractal topologies: modular expansion/contraction
+     "fractal_expand": seq(block("T’HOL", "VA’L", "U’M", repeat=2, close="NU’L"), "R’A"),
+     "fractal_contract": seq(block("T’HOL", "NU’L", "U’M", repeat=2, close="SH’A"), "R’A"),
  }

tnfr/scenarios.py CHANGED
@@ -17,12 +17,18 @@ def build_graph(n: int = 24, topology: str = "ring", seed: int | None = 1):
      else:
          G = nx.path_graph(n)

+     # Canonical values for initialization
+     inject_defaults(G, DEFAULTS)
+     vf_min = float(G.graph.get("VF_MIN", DEFAULTS["VF_MIN"]))
+     vf_max = float(G.graph.get("VF_MAX", DEFAULTS["VF_MAX"]))
+     th_min = float(G.graph.get("INIT_THETA_MIN", DEFAULTS.get("INIT_THETA_MIN", -3.1416)))
+     th_max = float(G.graph.get("INIT_THETA_MAX", DEFAULTS.get("INIT_THETA_MAX", 3.1416)))
+
      for i in G.nodes():
          nd = G.nodes[i]
          nd.setdefault("EPI", rng.uniform(0.1, 0.3))
-         nd.setdefault("νf", rng.uniform(0.8, 1.2))
-         nd.setdefault("θ", rng.uniform(-3.1416, 3.1416))
+         nd.setdefault("νf", rng.uniform(vf_min, vf_max))
+         nd.setdefault("θ", rng.uniform(th_min, th_max))
          nd.setdefault("Si", rng.uniform(0.4, 0.7))

-     inject_defaults(G, DEFAULTS)
      return G
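build_graph now injects DEFAULTS before initializing nodes, so νf and θ are drawn from VF_MIN/VF_MAX and INIT_THETA_MIN/INIT_THETA_MAX instead of hard-coded bounds. A quick check, assuming those keys are present in the package's DEFAULTS:

    from tnfr.scenarios import build_graph

    G = build_graph(n=24, topology="ring", seed=1)
    vf_values = [G.nodes[i]["νf"] for i in G.nodes()]
    print(min(vf_values), max(vf_values))   # should fall inside [VF_MIN, VF_MAX]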