tnfr-4.5.1-py3-none-any.whl → tnfr-6.0.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (170)
  1. tnfr/__init__.py +270 -90
  2. tnfr/__init__.pyi +40 -0
  3. tnfr/_compat.py +11 -0
  4. tnfr/_version.py +7 -0
  5. tnfr/_version.pyi +7 -0
  6. tnfr/alias.py +631 -0
  7. tnfr/alias.pyi +140 -0
  8. tnfr/cache.py +732 -0
  9. tnfr/cache.pyi +232 -0
  10. tnfr/callback_utils.py +381 -0
  11. tnfr/callback_utils.pyi +105 -0
  12. tnfr/cli/__init__.py +89 -0
  13. tnfr/cli/__init__.pyi +47 -0
  14. tnfr/cli/arguments.py +199 -0
  15. tnfr/cli/arguments.pyi +33 -0
  16. tnfr/cli/execution.py +322 -0
  17. tnfr/cli/execution.pyi +80 -0
  18. tnfr/cli/utils.py +34 -0
  19. tnfr/cli/utils.pyi +8 -0
  20. tnfr/config/__init__.py +12 -0
  21. tnfr/config/__init__.pyi +8 -0
  22. tnfr/config/constants.py +104 -0
  23. tnfr/config/constants.pyi +12 -0
  24. tnfr/config/init.py +36 -0
  25. tnfr/config/init.pyi +8 -0
  26. tnfr/config/operator_names.py +106 -0
  27. tnfr/config/operator_names.pyi +28 -0
  28. tnfr/config/presets.py +104 -0
  29. tnfr/config/presets.pyi +7 -0
  30. tnfr/constants/__init__.py +228 -0
  31. tnfr/constants/__init__.pyi +104 -0
  32. tnfr/constants/core.py +158 -0
  33. tnfr/constants/core.pyi +17 -0
  34. tnfr/constants/init.py +31 -0
  35. tnfr/constants/init.pyi +12 -0
  36. tnfr/constants/metric.py +102 -0
  37. tnfr/constants/metric.pyi +19 -0
  38. tnfr/constants_glyphs.py +16 -0
  39. tnfr/constants_glyphs.pyi +12 -0
  40. tnfr/dynamics/__init__.py +136 -0
  41. tnfr/dynamics/__init__.pyi +83 -0
  42. tnfr/dynamics/adaptation.py +201 -0
  43. tnfr/dynamics/aliases.py +22 -0
  44. tnfr/dynamics/coordination.py +343 -0
  45. tnfr/dynamics/dnfr.py +2315 -0
  46. tnfr/dynamics/dnfr.pyi +33 -0
  47. tnfr/dynamics/integrators.py +561 -0
  48. tnfr/dynamics/integrators.pyi +35 -0
  49. tnfr/dynamics/runtime.py +521 -0
  50. tnfr/dynamics/sampling.py +34 -0
  51. tnfr/dynamics/sampling.pyi +7 -0
  52. tnfr/dynamics/selectors.py +680 -0
  53. tnfr/execution.py +216 -0
  54. tnfr/execution.pyi +65 -0
  55. tnfr/flatten.py +283 -0
  56. tnfr/flatten.pyi +28 -0
  57. tnfr/gamma.py +320 -89
  58. tnfr/gamma.pyi +40 -0
  59. tnfr/glyph_history.py +337 -0
  60. tnfr/glyph_history.pyi +53 -0
  61. tnfr/grammar.py +23 -153
  62. tnfr/grammar.pyi +13 -0
  63. tnfr/helpers/__init__.py +151 -0
  64. tnfr/helpers/__init__.pyi +66 -0
  65. tnfr/helpers/numeric.py +88 -0
  66. tnfr/helpers/numeric.pyi +12 -0
  67. tnfr/immutable.py +214 -0
  68. tnfr/immutable.pyi +37 -0
  69. tnfr/initialization.py +199 -0
  70. tnfr/initialization.pyi +73 -0
  71. tnfr/io.py +311 -0
  72. tnfr/io.pyi +11 -0
  73. tnfr/locking.py +37 -0
  74. tnfr/locking.pyi +7 -0
  75. tnfr/metrics/__init__.py +41 -0
  76. tnfr/metrics/__init__.pyi +20 -0
  77. tnfr/metrics/coherence.py +1469 -0
  78. tnfr/metrics/common.py +149 -0
  79. tnfr/metrics/common.pyi +15 -0
  80. tnfr/metrics/core.py +259 -0
  81. tnfr/metrics/core.pyi +13 -0
  82. tnfr/metrics/diagnosis.py +840 -0
  83. tnfr/metrics/diagnosis.pyi +89 -0
  84. tnfr/metrics/export.py +151 -0
  85. tnfr/metrics/glyph_timing.py +369 -0
  86. tnfr/metrics/reporting.py +152 -0
  87. tnfr/metrics/reporting.pyi +12 -0
  88. tnfr/metrics/sense_index.py +294 -0
  89. tnfr/metrics/sense_index.pyi +9 -0
  90. tnfr/metrics/trig.py +216 -0
  91. tnfr/metrics/trig.pyi +12 -0
  92. tnfr/metrics/trig_cache.py +105 -0
  93. tnfr/metrics/trig_cache.pyi +10 -0
  94. tnfr/node.py +255 -177
  95. tnfr/node.pyi +161 -0
  96. tnfr/observers.py +154 -150
  97. tnfr/observers.pyi +46 -0
  98. tnfr/ontosim.py +135 -134
  99. tnfr/ontosim.pyi +33 -0
  100. tnfr/operators/__init__.py +452 -0
  101. tnfr/operators/__init__.pyi +31 -0
  102. tnfr/operators/definitions.py +181 -0
  103. tnfr/operators/definitions.pyi +92 -0
  104. tnfr/operators/jitter.py +266 -0
  105. tnfr/operators/jitter.pyi +11 -0
  106. tnfr/operators/registry.py +80 -0
  107. tnfr/operators/registry.pyi +15 -0
  108. tnfr/operators/remesh.py +569 -0
  109. tnfr/presets.py +10 -23
  110. tnfr/presets.pyi +7 -0
  111. tnfr/py.typed +0 -0
  112. tnfr/rng.py +440 -0
  113. tnfr/rng.pyi +14 -0
  114. tnfr/selector.py +217 -0
  115. tnfr/selector.pyi +19 -0
  116. tnfr/sense.py +307 -142
  117. tnfr/sense.pyi +30 -0
  118. tnfr/structural.py +69 -164
  119. tnfr/structural.pyi +46 -0
  120. tnfr/telemetry/__init__.py +13 -0
  121. tnfr/telemetry/verbosity.py +37 -0
  122. tnfr/tokens.py +61 -0
  123. tnfr/tokens.pyi +41 -0
  124. tnfr/trace.py +520 -95
  125. tnfr/trace.pyi +68 -0
  126. tnfr/types.py +382 -17
  127. tnfr/types.pyi +145 -0
  128. tnfr/utils/__init__.py +158 -0
  129. tnfr/utils/__init__.pyi +133 -0
  130. tnfr/utils/cache.py +755 -0
  131. tnfr/utils/cache.pyi +156 -0
  132. tnfr/utils/data.py +267 -0
  133. tnfr/utils/data.pyi +73 -0
  134. tnfr/utils/graph.py +87 -0
  135. tnfr/utils/graph.pyi +10 -0
  136. tnfr/utils/init.py +746 -0
  137. tnfr/utils/init.pyi +85 -0
  138. tnfr/utils/io.py +157 -0
  139. tnfr/utils/io.pyi +10 -0
  140. tnfr/utils/validators.py +130 -0
  141. tnfr/utils/validators.pyi +19 -0
  142. tnfr/validation/__init__.py +25 -0
  143. tnfr/validation/__init__.pyi +17 -0
  144. tnfr/validation/compatibility.py +59 -0
  145. tnfr/validation/compatibility.pyi +8 -0
  146. tnfr/validation/grammar.py +149 -0
  147. tnfr/validation/grammar.pyi +11 -0
  148. tnfr/validation/rules.py +194 -0
  149. tnfr/validation/rules.pyi +18 -0
  150. tnfr/validation/syntax.py +151 -0
  151. tnfr/validation/syntax.pyi +7 -0
  152. tnfr-6.0.0.dist-info/METADATA +135 -0
  153. tnfr-6.0.0.dist-info/RECORD +157 -0
  154. tnfr/cli.py +0 -322
  155. tnfr/config.py +0 -41
  156. tnfr/constants.py +0 -277
  157. tnfr/dynamics.py +0 -814
  158. tnfr/helpers.py +0 -264
  159. tnfr/main.py +0 -47
  160. tnfr/metrics.py +0 -597
  161. tnfr/operators.py +0 -525
  162. tnfr/program.py +0 -176
  163. tnfr/scenarios.py +0 -34
  164. tnfr/validators.py +0 -38
  165. tnfr-4.5.1.dist-info/METADATA +0 -221
  166. tnfr-4.5.1.dist-info/RECORD +0 -28
  167. {tnfr-4.5.1.dist-info → tnfr-6.0.0.dist-info}/WHEEL +0 -0
  168. {tnfr-4.5.1.dist-info → tnfr-6.0.0.dist-info}/entry_points.txt +0 -0
  169. {tnfr-4.5.1.dist-info → tnfr-6.0.0.dist-info}/licenses/LICENSE.md +0 -0
  170. {tnfr-4.5.1.dist-info → tnfr-6.0.0.dist-info}/top_level.txt +0 -0
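The listing above shows the 4.5.1 monolithic modules (tnfr/metrics.py, tnfr/helpers.py, tnfr/constants.py, tnfr/dynamics.py, tnfr/operators.py, tnfr/cli.py) being removed and packages of the same names, plus .pyi stubs, being added in 6.0.0. A minimal sketch, assuming only that the listed modules exist as importable packages after upgrading, for checking which of those import paths resolve in an installed environment; nothing is assumed here about which symbols each module re-exports.

# Sketch: check that the restructured 6.0.0 module paths resolve after upgrading.
# Module names are taken from the file list above; symbol-level compatibility
# (e.g. what tnfr.metrics re-exports) is not assumed.
from importlib import metadata, util

print("installed tnfr version:", metadata.version("tnfr"))

for module in ("tnfr.metrics", "tnfr.helpers", "tnfr.constants",
               "tnfr.dynamics", "tnfr.operators", "tnfr.cli"):
    status = "present" if util.find_spec(module) is not None else "missing"
    print(f"{module}: {status}")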
tnfr/metrics.py DELETED
@@ -1,597 +0,0 @@
- from __future__ import annotations
- from typing import Dict, Any, List, Tuple
- from collections import defaultdict, Counter
- import statistics
- import csv
- import json
- from math import cos
-
- from .constants import DEFAULTS, ALIAS_EPI, ALIAS_THETA, ALIAS_DNFR
- from .helpers import (
-     register_callback,
-     ensure_history,
-     last_glifo,
-     _get_attr,
-     clamp01,
-     list_mean,
-     fmean,
- )
- from .sense import GLYPHS_CANONICAL
-
- # -------------
- # DEFAULTS
- # -------------
- DEFAULTS.setdefault("METRICS", {
-     "enabled": True,
-     "save_by_node": True,  # store Tg per node (heavier)
-     "normalize_series": False  # glyphogram normalized to a fraction per step
- })
-
-
-
- # -------------
- # Internal utilities
- # -------------
-
-
- # -------------
- # Per-node state for Tg
- # -------------
-
- def _tg_state(nd: Dict[str, Any]) -> Dict[str, Any]:
-     """Internal per-node structure to accumulate run times per glyph.
-     Fields: curr (current glyph), run (time accumulated in the current glyph)
-     """
-     return nd.setdefault("_Tg", {"curr": None, "run": 0.0})
-
-
- # -------------
- # Main callback: update metrics per step
- # -------------
-
- def _metrics_step(G, *args, **kwargs):
-     """Update TNFR operational metrics per step.
-
-     - Tg (glyphic time): sum of runs per glyph (global and per node).
-     - Latency index: fraction of nodes in SH’A.
-     - Glyphogram: count or fraction per glyph in the step.
-
-     All results are stored in G.graph['history'].
-     """
-     if not G.graph.get("METRICS", DEFAULTS.get("METRICS", {})).get("enabled", True):
-         return
-
-     hist = ensure_history(G)
-     dt = float(G.graph.get("DT", 1.0))
-     t = float(G.graph.get("_t", 0.0))
-
-     # --- Glyphogram (counts per glyph for this step) ---
-     counts = Counter()
-
-     # --- Latency index: proportion of nodes in SH’A ---
-     n_total = 0
-     n_latent = 0
-
-     # --- Tg: accumulate runs per node ---
-     save_by_node = bool(G.graph.get("METRICS", DEFAULTS["METRICS"]).get("save_by_node", True))
-     tg_total = hist.setdefault("Tg_total", defaultdict(float))  # total time per glyph (global)
-     tg_by_node = hist.setdefault("Tg_by_node", {})  # node → {glyph: [runs, ...]}
-
-     for n in G.nodes():
-         nd = G.nodes[n]
-         g = last_glifo(nd)
-         if not g:
-             continue
-
-         n_total += 1
-         if g == "SH’A":
-             n_latent += 1
-
-         counts[g] += 1
-
-         st = _tg_state(nd)
-         # If we stay in the same glyph, keep accumulating; if it changed, close the run
-         if st["curr"] is None:
-             st["curr"] = g
-             st["run"] = dt
-         elif g == st["curr"]:
-             st["run"] += dt
-         else:
-             # close the previous run
-             prev = st["curr"]
-             dur = float(st["run"])
-             tg_total[prev] += dur
-             if save_by_node:
-                 rec = tg_by_node.setdefault(n, defaultdict(list))
-                 rec[prev].append(dur)
-             # restart the run
-             st["curr"] = g
-             st["run"] = dt
-
-     # At the end of the step the current run is left open: it is closed when the glyph changes.
-
-     # Store the glyphogram (raw and normalized counts)
-     norm = bool(G.graph.get("METRICS", DEFAULTS["METRICS"]).get("normalize_series", False))
-     row = {"t": t}
-     total = max(1, sum(counts.values()))
-     for g in GLYPHS_CANONICAL:
-         c = counts.get(g, 0)
-         row[g] = (c / total) if norm else c
-     hist.setdefault("glifogram", []).append(row)
-
-     # Store the latency index
-     li = (n_latent / max(1, n_total)) if n_total else 0.0
-     hist.setdefault("latency_index", []).append({"t": t, "value": li})
-
-     # --- EPI support and norm ---
-     thr = float(G.graph.get("EPI_SUPPORT_THR", DEFAULTS.get("EPI_SUPPORT_THR", 0.0)))
-     supp_nodes = [n for n in G.nodes() if abs(_get_attr(G.nodes[n], ALIAS_EPI, 0.0)) >= thr]
-     norm = (
-         sum(abs(_get_attr(G.nodes[n], ALIAS_EPI, 0.0)) for n in supp_nodes) / len(supp_nodes)
-         if supp_nodes else 0.0
-     )
-     hist.setdefault("EPI_support", []).append({"t": t, "size": len(supp_nodes), "norm": float(norm)})
-
-     # --- Morphosyntactic metrics ---
-     total = max(1, sum(counts.values()))
-     id_val = counts.get("O’Z", 0) / total
-     cm_val = (counts.get("Z’HIR", 0) + counts.get("NA’V", 0)) / total
-     ne_val = (counts.get("I’L", 0) + counts.get("T’HOL", 0)) / total
-     pp_val = counts.get("SH’A", 0) / max(1, counts.get("RE’MESH", 0))
-     hist.setdefault("morph", []).append({"t": t, "ID": id_val, "CM": cm_val, "NE": ne_val, "PP": pp_val})
-
-
- # -------------
- # Callback registration
- # -------------
-
- def register_metrics_callbacks(G) -> None:
-     register_callback(G, when="after_step", func=_metrics_step, name="metrics_step")
-     # New canonical functionality
-     register_coherence_callbacks(G)
-     register_diagnosis_callbacks(G)
-
-
- # -------------
- # Queries / reports
- # -------------
-
- def Tg_global(G, normalize: bool = True) -> Dict[str, float]:
-     """Total glyphic time per class. If normalize=True, returns fractions of the total."""
-     hist = ensure_history(G)
-     tg_total: Dict[str, float] = hist.get("Tg_total", {})
-     total = sum(tg_total.values()) or 1.0
-     if normalize:
-         return {g: float(tg_total.get(g, 0.0)) / total for g in GLYPHS_CANONICAL}
-     return {g: float(tg_total.get(g, 0.0)) for g in GLYPHS_CANONICAL}
-
-
- def Tg_by_node(G, n, normalize: bool = False) -> Dict[str, float | List[float]]:
-     """Per-node summary: if normalize, returns per-glyph means; otherwise, the lists of runs."""
-     hist = ensure_history(G)
-     rec = hist.get("Tg_by_node", {}).get(n, {})
-     if not normalize:
-         # convert defaultdict → list for serialization
-         return {g: list(rec.get(g, [])) for g in GLYPHS_CANONICAL}
-     out = {}
-     for g in GLYPHS_CANONICAL:
-         runs = rec.get(g, [])
-         out[g] = float(statistics.mean(runs)) if runs else 0.0
-
-     return out
-
-
- def latency_series(G) -> Dict[str, List[float]]:
-     hist = ensure_history(G)
-     xs = hist.get("latency_index", [])
-     return {
-         "t": [float(x.get("t", i)) for i, x in enumerate(xs)],
-         "value": [float(x.get("value", 0.0)) for x in xs],
-     }
-
-
- def glifogram_series(G) -> Dict[str, List[float]]:
-     hist = ensure_history(G)
-     xs = hist.get("glifogram", [])
-     if not xs:
-         return {"t": []}
-     out = {"t": [float(x.get("t", i)) for i, x in enumerate(xs)]}
-     for g in GLYPHS_CANONICAL:
-         out[g] = [float(x.get(g, 0.0)) for x in xs]
-     return out
-
-
- def glyph_top(G, k: int = 3) -> List[Tuple[str, float]]:
-     """Top-k structural operators by Tg_global (fraction)."""
-     tg = Tg_global(G, normalize=True)
-     return sorted(tg.items(), key=lambda kv: kv[1], reverse=True)[:max(1, int(k))]
-
-
- def glyph_dwell_stats(G, n) -> Dict[str, Dict[str, float]]:
-     """Per-node statistics: mean/median/max of runs per glyph."""
-     hist = ensure_history(G)
-     rec = hist.get("Tg_by_node", {}).get(n, {})
-     out = {}
-     for g in GLYPHS_CANONICAL:
-         runs = list(rec.get(g, []))
-         if not runs:
-             out[g] = {"mean": 0.0, "median": 0.0, "max": 0.0, "count": 0}
-         else:
-             out[g] = {
-                 "mean": float(statistics.mean(runs)),
-                 "median": float(statistics.median(runs)),
-                 "max": float(max(runs)),
-                 "count": int(len(runs)),
-             }
-     return out
-
-
- # -----------------------------
- # Export history to CSV/JSON
- # -----------------------------
-
- def export_history(G, base_path: str, fmt: str = "csv") -> None:
-     """Dump the glyphogram and the σ(t) trace to compact CSV or JSON files."""
-     hist = ensure_history(G)
-     glifo = glifogram_series(G)
-     sigma_mag = hist.get("sense_sigma_mag", [])
-     sigma = {
-         "t": list(range(len(sigma_mag))),
-         "sigma_x": hist.get("sense_sigma_x", []),
-         "sigma_y": hist.get("sense_sigma_y", []),
-         "mag": sigma_mag,
-         "angle": hist.get("sense_sigma_angle", []),
-     }
-     morph = hist.get("morph", [])
-     epi_supp = hist.get("EPI_support", [])
-     fmt = fmt.lower()
-     if fmt == "csv":
-         with open(base_path + "_glifogram.csv", "w", newline="") as f:
-             writer = csv.writer(f)
-             writer.writerow(["t", *GLYPHS_CANONICAL])
-             ts = glifo.get("t", [])
-             default_col = [0] * len(ts)
-             for i, t in enumerate(ts):
-                 row = [t] + [glifo.get(g, default_col)[i] for g in GLYPHS_CANONICAL]
-                 writer.writerow(row)
-         with open(base_path + "_sigma.csv", "w", newline="") as f:
-             writer = csv.writer(f)
-             writer.writerow(["t", "x", "y", "mag", "angle"])
-             for i, t in enumerate(sigma["t"]):
-                 writer.writerow([t, sigma["sigma_x"][i], sigma["sigma_y"][i], sigma["mag"][i], sigma["angle"][i]])
-         if morph:
-             with open(base_path + "_morph.csv", "w", newline="") as f:
-                 writer = csv.writer(f)
-                 writer.writerow(["t", "ID", "CM", "NE", "PP"])
-                 for row in morph:
-                     writer.writerow([row.get("t"), row.get("ID"), row.get("CM"), row.get("NE"), row.get("PP")])
-         if epi_supp:
-             with open(base_path + "_epi_support.csv", "w", newline="") as f:
-                 writer = csv.writer(f)
-                 writer.writerow(["t", "size", "norm"])
-                 for row in epi_supp:
-                     writer.writerow([row.get("t"), row.get("size"), row.get("norm")])
-     else:
-         data = {"glifogram": glifo, "sigma": sigma, "morph": morph, "epi_support": epi_supp}
-         with open(base_path + ".json", "w") as f:
-             json.dump(data, f)
-
-
- # =========================
- # COHERENCE W_ij^t (TNFR)
- # =========================
-
-
- def _norm01(x, lo, hi):
-     if hi <= lo:
-         return 0.0
-     v = (float(x) - float(lo)) / (float(hi) - float(lo))
-     return 0.0 if v < 0 else (1.0 if v > 1.0 else v)
-
-
- def _similarity_abs(a, b, lo, hi):
-     return 1.0 - _norm01(abs(float(a) - float(b)), 0.0, float(hi - lo) if hi > lo else 1.0)
-
-
- def _coherence_components(G, ni, nj, epi_min, epi_max, vf_min, vf_max):
-     ndi = G.nodes[ni]
-     ndj = G.nodes[nj]
-     th_i = _get_attr(ndi, ALIAS_THETA, 0.0)
-     th_j = _get_attr(ndj, ALIAS_THETA, 0.0)
-     s_phase = 0.5 * (1.0 + cos(th_i - th_j))
-     epi_i = _get_attr(ndi, ALIAS_EPI, 0.0)
-     epi_j = _get_attr(ndj, ALIAS_EPI, 0.0)
-     s_epi = _similarity_abs(epi_i, epi_j, epi_min, epi_max)
-     vf_i = float(_get_attr(ndi, "νf", 0.0))
-     vf_j = float(_get_attr(ndj, "νf", 0.0))
-     s_vf = _similarity_abs(vf_i, vf_j, vf_min, vf_max)
-     si_i = clamp01(float(_get_attr(ndi, "Si", 0.0)))
-     si_j = clamp01(float(_get_attr(ndj, "Si", 0.0)))
-     s_si = 1.0 - abs(si_i - si_j)
-     return s_phase, s_epi, s_vf, s_si
-
-
- def coherence_matrix(G):
-     cfg = G.graph.get("COHERENCE", DEFAULTS["COHERENCE"])
-     if not cfg.get("enabled", True):
-         return None, None
-
-     nodes = list(G.nodes())
-     n = len(nodes)
-     if n == 0:
-         return nodes, []
-
-     # Precompute indices to avoid repeated list.index calls within loops
-     node_to_index = {node: idx for idx, node in enumerate(nodes)}
-
-     epi_vals = [float(_get_attr(G.nodes[v], ALIAS_EPI, 0.0)) for v in nodes]
-     vf_vals = [float(_get_attr(G.nodes[v], "νf", 0.0)) for v in nodes]
-     epi_min, epi_max = min(epi_vals), max(epi_vals)
-     vf_min, vf_max = min(vf_vals), max(vf_vals)
-
-     wdict = dict(cfg.get("weights", {}))
-     for k in ("phase", "epi", "vf", "si"):
-         wdict.setdefault(k, 0.0)
-     wsum = sum(float(v) for v in wdict.values()) or 1.0
-     wnorm = {k: float(v) / wsum for k, v in wdict.items()}
-
-     scope = str(cfg.get("scope", "neighbors")).lower()
-     neighbors_only = scope != "all"
-     self_diag = bool(cfg.get("self_on_diag", True))
-     mode = str(cfg.get("store_mode", "sparse")).lower()
-     thr = float(cfg.get("threshold", 0.0))
-     if mode not in ("sparse", "dense"):
-         mode = "sparse"
-
-     if mode == "dense":
-         W = [[0.0] * n for _ in range(n)]
-     else:
-         W = []
-
-     row_sum = [0.0] * n
-     row_count = [0] * n
-
-     for i, ni in enumerate(nodes):
-         if self_diag:
-             if mode == "dense":
-                 W[i][i] = 1.0
-             else:
-                 W.append((i, i, 1.0))
-             row_sum[i] += 1.0
-             row_count[i] += 1
-
-         neighs = G.neighbors(ni) if neighbors_only else nodes
-         for nj in neighs:
-             if nj == ni:
-                 continue
-             j = node_to_index[nj]
-             s_phase, s_epi, s_vf, s_si = _coherence_components(
-                 G, ni, nj, epi_min, epi_max, vf_min, vf_max
-             )
-             wij = (
-                 wnorm["phase"] * s_phase
-                 + wnorm["epi"] * s_epi
-                 + wnorm["vf"] * s_vf
-                 + wnorm["si"] * s_si
-             )
-             wij = clamp01(wij)
-             if mode == "dense":
-                 W[i][j] = wij
-             else:
-                 if wij >= thr:
-                     W.append((i, j, wij))
-             row_sum[i] += wij
-             row_count[i] += 1
-
-     Wi = [row_sum[i] / max(1, row_count[i]) for i in range(n)]
-     vals = []
-     if mode == "dense":
-         for i in range(n):
-             for j in range(n):
-                 if i == j:
-                     continue
-                 vals.append(W[i][j])
-     else:
-         for (i, j, w) in W:
-             if i == j:
-                 continue
-             vals.append(w)
-
-     stats = {
-         "min": min(vals) if vals else 0.0,
-         "max": max(vals) if vals else 0.0,
-         "mean": (sum(vals) / len(vals)) if vals else 0.0,
-         "n_edges": len(vals),
-         "mode": mode,
-         "scope": scope,
-     }
-
-     hist = ensure_history(G)
-     hist.setdefault(cfg.get("history_key", "W_sparse"), []).append(W)
-     hist.setdefault(cfg.get("Wi_history_key", "W_i"), []).append(Wi)
-     hist.setdefault(cfg.get("stats_history_key", "W_stats"), []).append(stats)
-
-     return nodes, W
-
-
- def local_phase_sync_weighted(G, n, nodes_order=None, W_row=None):
-     import cmath
-
-     cfg = G.graph.get("COHERENCE", DEFAULTS["COHERENCE"])
-     scope = str(cfg.get("scope", "neighbors")).lower()
-     neighbors_only = scope != "all"
-
-     if W_row is None or nodes_order is None:
-         vec = [
-             cmath.exp(1j * float(_get_attr(G.nodes[v], ALIAS_THETA, 0.0)))
-             for v in (G.neighbors(n) if neighbors_only else (set(G.nodes()) - {n}))
-         ]
-         if not vec:
-             return 0.0
-         mean = sum(vec) / len(vec)
-         return abs(mean)
-
-     i = nodes_order.index(n)
-     if isinstance(W_row, list) and W_row and isinstance(W_row[0], (int, float)):
-         weights = W_row
-     else:
-         weights = [0.0] * len(nodes_order)
-         for (ii, jj, w) in W_row:
-             if ii == i:
-                 weights[jj] = w
-
-     num = 0 + 0j
-     den = 0.0
-     for j, nj in enumerate(nodes_order):
-         if nj == n:
-             continue
-         w = weights[j]
-         den += w
-         th_j = float(_get_attr(G.nodes[nj], ALIAS_THETA, 0.0))
-         num += w * cmath.exp(1j * th_j)
-     return abs(num / den) if den else 0.0
-
-
- def _coherence_step(G, ctx=None):
-     if not G.graph.get("COHERENCE", DEFAULTS["COHERENCE"]).get("enabled", True):
-         return
-     coherence_matrix(G)
-
-
- def register_coherence_callbacks(G) -> None:
-     register_callback(G, when="after_step", func=_coherence_step, name="coherence_step")
-
-
- # =========================
- # NODAL DIAGNOSIS (TNFR)
- # =========================
-
-
- def _dnfr_norm(nd, dnfr_max):
-     val = abs(float(_get_attr(nd, ALIAS_DNFR, 0.0)))
-     if dnfr_max <= 0:
-         return 0.0
-     x = val / dnfr_max
-     return 1.0 if x > 1 else x
-
-
- def _symmetry_index(G, n, k=3, epi_min=None, epi_max=None):
-     nd = G.nodes[n]
-     epi_i = float(_get_attr(nd, ALIAS_EPI, 0.0))
-     vec = list(G.neighbors(n))
-     if not vec:
-         return 1.0
-     epi_bar = fmean(float(_get_attr(G.nodes[v], ALIAS_EPI, epi_i)) for v in vec)
-     if epi_min is None or epi_max is None:
-         epis = [float(_get_attr(G.nodes[v], ALIAS_EPI, 0.0)) for v in G.nodes()]
-         epi_min, epi_max = min(epis), max(epis)
-     return _similarity_abs(epi_i, epi_bar, epi_min, epi_max)
-
-
- def _state_from_thresholds(Rloc, dnfr_n, cfg):
-     stb = cfg.get("stable", {"Rloc_hi": 0.8, "dnfr_lo": 0.2, "persist": 3})
-     dsr = cfg.get("dissonance", {"Rloc_lo": 0.4, "dnfr_hi": 0.5, "persist": 3})
-     if (Rloc >= float(stb["Rloc_hi"])) and (dnfr_n <= float(stb["dnfr_lo"])):
-         return "estable"
-     if (Rloc <= float(dsr["Rloc_lo"])) and (dnfr_n >= float(dsr["dnfr_hi"])):
-         return "disonante"
-     return "transicion"
-
-
- def _recommendation(state, cfg):
-     adv = cfg.get("advice", {})
-     key = {"estable": "stable", "transicion": "transition", "disonante": "dissonant"}[state]
-     return list(adv.get(key, []))
-
-
- def _diagnosis_step(G, ctx=None):
-     dcfg = G.graph.get("DIAGNOSIS", DEFAULTS["DIAGNOSIS"])
-     if not dcfg.get("enabled", True):
-         return
-
-     hist = ensure_history(G)
-     key = dcfg.get("history_key", "nodal_diag")
-
-     dnfr_vals = [abs(float(_get_attr(G.nodes[v], ALIAS_DNFR, 0.0))) for v in G.nodes()]
-     dnfr_max = max(dnfr_vals) if dnfr_vals else 1.0
-     epi_vals = [float(_get_attr(G.nodes[v], ALIAS_EPI, 0.0)) for v in G.nodes()]
-     epi_min, epi_max = (min(epi_vals) if epi_vals else 0.0), (max(epi_vals) if epi_vals else 1.0)
-
-     CfgW = G.graph.get("COHERENCE", DEFAULTS["COHERENCE"])
-     Wkey = CfgW.get("Wi_history_key", "W_i")
-     Wm_key = CfgW.get("history_key", "W_sparse")
-     Wi_series = hist.get(Wkey, [])
-     Wi_last = Wi_series[-1] if Wi_series else None
-     Wm_series = hist.get(Wm_key, [])
-     Wm_last = Wm_series[-1] if Wm_series else None
-
-     nodes = list(G.nodes())
-     diag = {}
-     for i, n in enumerate(nodes):
-         nd = G.nodes[n]
-         Si = clamp01(float(_get_attr(nd, "Si", 0.0)))
-         EPI = float(_get_attr(nd, ALIAS_EPI, 0.0))
-         vf = float(_get_attr(nd, "νf", 0.0))
-         dnfr_n = _dnfr_norm(nd, dnfr_max)
-
-         Rloc = 0.0
-         if Wm_last is not None:
-             if Wm_last and isinstance(Wm_last[0], list):
-                 row = Wm_last[i]
-             else:
-                 row = Wm_last
-             Rloc = local_phase_sync_weighted(G, n, nodes_order=nodes, W_row=row)
-         else:
-             Rloc = local_phase_sync_weighted(G, n)
-
-         symm = _symmetry_index(G, n, epi_min=epi_min, epi_max=epi_max) if dcfg.get("compute_symmetry", True) else None
-         state = _state_from_thresholds(Rloc, dnfr_n, dcfg)
-
-         alerts = []
-         if state == "disonante" and dnfr_n >= float(dcfg.get("dissonance", {}).get("dnfr_hi", 0.5)):
-             alerts.append("tensión estructural alta")
-
-         advice = _recommendation(state, dcfg)
-
-         rec = {
-             "node": n,
-             "Si": Si,
-             "EPI": EPI,
-             "νf": vf,
-             "dnfr_norm": dnfr_n,
-             "W_i": (Wi_last[i] if (Wi_last and i < len(Wi_last)) else None),
-             "R_local": Rloc,
-             "symmetry": symm,
-             "state": state,
-             "advice": advice,
-             "alerts": alerts,
-         }
-         diag[n] = rec
-
-     hist.setdefault(key, []).append(diag)
-
-
- def dissonance_events(G, ctx=None):
-     """Emit start/end events of structural dissonance per node."""
-     hist = ensure_history(G)
-     evs = hist.setdefault("events", [])
-     norms = G.graph.get("_sel_norms", {})
-     dnfr_max = float(norms.get("dnfr_max", 1.0)) or 1.0
-     step_idx = len(hist.get("C_steps", []))
-     for n in G.nodes():
-         nd = G.nodes[n]
-         dn = abs(_get_attr(nd, ALIAS_DNFR, 0.0)) / dnfr_max
-         Rloc = local_phase_sync_weighted(G, n)
-         st = bool(nd.get("_disr_state", False))
-         if (not st) and dn >= 0.5 and Rloc <= 0.4:
-             nd["_disr_state"] = True
-             evs.append(("disonance_start", {"node": n, "step": step_idx}))
-         elif st and dn <= 0.2 and Rloc >= 0.7:
-             nd["_disr_state"] = False
-             evs.append(("disonance_end", {"node": n, "step": step_idx}))
-
-
- def register_diagnosis_callbacks(G) -> None:
-     register_callback(G, when="after_step", func=_diagnosis_step, name="diagnosis_step")
-     register_callback(G, when="after_step", func=dissonance_events, name="dissonance_events")
-
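The core of the deleted _metrics_step above is a per-node run-length state machine over glyphs: a run accumulates dt while the glyph repeats and is closed into Tg_total when the glyph changes, with the final run left open. A self-contained toy sketch of that bookkeeping, independent of the tnfr package (the accumulate_runs helper and the sample glyph sequence are illustrative, not part of either release):

# Toy rerun of the per-node glyph run-length bookkeeping used by _metrics_step:
# runs accumulate while the glyph repeats and are closed when the glyph changes.
from collections import defaultdict

def accumulate_runs(glyph_sequence, dt=1.0):
    tg_total = defaultdict(float)        # closed run time per glyph
    state = {"curr": None, "run": 0.0}   # same fields as _tg_state
    for g in glyph_sequence:
        if state["curr"] is None:
            state["curr"], state["run"] = g, dt
        elif g == state["curr"]:
            state["run"] += dt
        else:
            tg_total[state["curr"]] += state["run"]  # close the previous run
            state["curr"], state["run"] = g, dt
    return dict(tg_total), state         # the last run stays open, as in the original

closed, open_run = accumulate_runs(["I’L", "I’L", "SH’A", "SH’A", "SH’A", "I’L"])
print(closed)    # {'I’L': 2.0, 'SH’A': 3.0}
print(open_run)  # {'curr': 'I’L', 'run': 1.0}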