tnfr 4.1.0__tar.gz → 4.3.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (39) hide show
  1. {tnfr-4.1.0/src/tnfr.egg-info → tnfr-4.3.0}/PKG-INFO +10 -4
  2. {tnfr-4.1.0 → tnfr-4.3.0}/README.md +7 -1
  3. {tnfr-4.1.0 → tnfr-4.3.0}/pyproject.toml +4 -4
  4. {tnfr-4.1.0 → tnfr-4.3.0}/src/tnfr/__init__.py +13 -11
  5. {tnfr-4.1.0 → tnfr-4.3.0}/src/tnfr/cli.py +85 -8
  6. {tnfr-4.1.0 → tnfr-4.3.0}/src/tnfr/constants.py +21 -17
  7. {tnfr-4.1.0 → tnfr-4.3.0}/src/tnfr/dynamics.py +64 -7
  8. {tnfr-4.1.0 → tnfr-4.3.0}/src/tnfr/gamma.py +18 -8
  9. {tnfr-4.1.0 → tnfr-4.3.0}/src/tnfr/helpers.py +21 -0
  10. {tnfr-4.1.0 → tnfr-4.3.0}/src/tnfr/metrics.py +50 -27
  11. {tnfr-4.1.0 → tnfr-4.3.0}/src/tnfr/operators.py +43 -21
  12. {tnfr-4.1.0 → tnfr-4.3.0}/src/tnfr/presets.py +3 -0
  13. {tnfr-4.1.0 → tnfr-4.3.0}/src/tnfr/scenarios.py +9 -3
  14. {tnfr-4.1.0 → tnfr-4.3.0}/src/tnfr/sense.py +6 -21
  15. {tnfr-4.1.0 → tnfr-4.3.0}/src/tnfr/trace.py +4 -20
  16. {tnfr-4.1.0 → tnfr-4.3.0/src/tnfr.egg-info}/PKG-INFO +10 -4
  17. {tnfr-4.1.0 → tnfr-4.3.0}/src/tnfr.egg-info/SOURCES.txt +2 -0
  18. tnfr-4.3.0/tests/test_canon.py +30 -0
  19. {tnfr-4.1.0 → tnfr-4.3.0}/tests/test_edge_cases.py +1 -0
  20. tnfr-4.3.0/tests/test_history_series.py +27 -0
  21. {tnfr-4.1.0 → tnfr-4.3.0}/tests/test_invariants.py +5 -0
  22. {tnfr-4.1.0 → tnfr-4.3.0}/tests/test_remesh.py +19 -0
  23. {tnfr-4.1.0 → tnfr-4.3.0}/LICENSE.md +0 -0
  24. {tnfr-4.1.0 → tnfr-4.3.0}/setup.cfg +0 -0
  25. {tnfr-4.1.0 → tnfr-4.3.0}/src/tnfr/grammar.py +0 -0
  26. {tnfr-4.1.0 → tnfr-4.3.0}/src/tnfr/main.py +0 -0
  27. {tnfr-4.1.0 → tnfr-4.3.0}/src/tnfr/observers.py +0 -0
  28. {tnfr-4.1.0 → tnfr-4.3.0}/src/tnfr/ontosim.py +0 -0
  29. {tnfr-4.1.0 → tnfr-4.3.0}/src/tnfr/program.py +0 -0
  30. {tnfr-4.1.0 → tnfr-4.3.0}/src/tnfr/types.py +0 -0
  31. {tnfr-4.1.0 → tnfr-4.3.0}/src/tnfr.egg-info/dependency_links.txt +0 -0
  32. {tnfr-4.1.0 → tnfr-4.3.0}/src/tnfr.egg-info/entry_points.txt +0 -0
  33. {tnfr-4.1.0 → tnfr-4.3.0}/src/tnfr.egg-info/requires.txt +0 -0
  34. {tnfr-4.1.0 → tnfr-4.3.0}/src/tnfr.egg-info/top_level.txt +0 -0
  35. {tnfr-4.1.0 → tnfr-4.3.0}/tests/test_cli_sanity.py +0 -0
  36. {tnfr-4.1.0 → tnfr-4.3.0}/tests/test_gamma.py +0 -0
  37. {tnfr-4.1.0 → tnfr-4.3.0}/tests/test_grammar.py +0 -0
  38. {tnfr-4.1.0 → tnfr-4.3.0}/tests/test_history.py +0 -0
  39. {tnfr-4.1.0 → tnfr-4.3.0}/tests/test_program.py +0 -0
@@ -1,12 +1,12 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: tnfr
3
- Version: 4.1.0
4
- Summary: TNFR canónica: dinámica glífica modular sobre redes.
3
+ Version: 4.3.0
4
+ Summary: Canonical TNFR: modular glyph-based dynamics on networks.
5
5
  Author: fmg
6
6
  License: MIT
7
7
  Project-URL: Homepage, https://pypi.org/project/tnfr/
8
8
  Project-URL: Repository, https://github.com/fermga/Teoria-de-la-naturaleza-fractal-resonante-TNFR-
9
- Keywords: TNFR,fractal resonante,resonancia,glifos,networkx,dinámica,coherencia,EPI,Kuramoto
9
+ Keywords: TNFR,resonant fractal,resonance,glyphs,networkx,dynamics,coherence,EPI,Kuramoto
10
10
  Classifier: Programming Language :: Python :: 3
11
11
  Classifier: Programming Language :: Python :: 3 :: Only
12
12
  Classifier: Programming Language :: Python :: 3.9
@@ -25,7 +25,13 @@ License-File: LICENSE.md
25
25
  Requires-Dist: networkx>=2.6
26
26
  Dynamic: license-file
27
27
 
28
- # General Project Structure
28
+ # TNFR Canonical Glyph-Based Dynamics
29
+
30
+ Reference implementation of the Resonant Fractal Nature Theory (TNFR).
31
+ It models glyph-driven dynamics on NetworkX graphs, providing a modular
32
+ engine to simulate coherent reorganization processes.
33
+
34
+ ## General Project Structure
29
35
 
30
36
  * **Package entry point.** `__init__.py` registers modules under short names to avoid circular imports and exposes the public API: `preparar_red`, `step`, `run`, and observation utilities.
31
37
 
@@ -1,4 +1,10 @@
1
- # General Project Structure
1
+ # TNFR Canonical Glyph-Based Dynamics
2
+
3
+ Reference implementation of the Resonant Fractal Nature Theory (TNFR).
4
+ It models glyph-driven dynamics on NetworkX graphs, providing a modular
5
+ engine to simulate coherent reorganization processes.
6
+
7
+ ## General Project Structure
2
8
 
3
9
  * **Package entry point.** `__init__.py` registers modules under short names to avoid circular imports and exposes the public API: `preparar_red`, `step`, `run`, and observation utilities.
4
10
 
@@ -1,14 +1,14 @@
1
1
  [project]
2
2
  name = "tnfr"
3
- version = "4.1.0"
4
- description = "TNFR canónica: dinámica glífica modular sobre redes."
3
+ version = "4.3.0"
4
+ description = "Canonical TNFR: modular glyph-based dynamics on networks."
5
5
  readme = "README.md"
6
6
  requires-python = ">=3.9"
7
7
  license = { text = "MIT" }
8
8
  authors = [{ name = "fmg" }]
9
9
  keywords = [
10
- "TNFR", "fractal resonante", "resonancia", "glifos",
11
- "networkx", "dinámica", "coherencia", "EPI", "Kuramoto"
10
+ "TNFR", "resonant fractal", "resonance", "glyphs",
11
+ "networkx", "dynamics", "coherence", "EPI", "Kuramoto"
12
12
  ]
13
13
  classifiers = [
14
14
  "Programming Language :: Python :: 3",
@@ -8,10 +8,10 @@ Ecuación nodal:
8
8
  ∂EPI/∂t = νf · ΔNFR(t)
9
9
  """
10
10
 
11
- __version__ = "4.1.0"
11
+ __version__ = "4.3.0"
12
12
 
13
13
  # Re-exports de la API pública
14
- from .dynamics import step, run, set_delta_nfr_hook
14
+ from .dynamics import step, run, set_delta_nfr_hook, validate_canon
15
15
  from .ontosim import preparar_red
16
16
  from .observers import attach_standard_observer, coherencia_global, orden_kuramoto
17
17
  from .gamma import GAMMA_REGISTRY, eval_gamma, kuramoto_R_psi
@@ -22,12 +22,12 @@ from .sense import (
22
22
  push_sigma_snapshot, sigma_series, sigma_rose,
23
23
  register_sigma_callback,
24
24
  )
25
- from .metrics import (
26
- register_metrics_callbacks,
27
- Tg_global, Tg_by_node,
28
- latency_series, glifogram_series,
29
- glyph_top, glyph_dwell_stats,
30
- )
25
+ from .metrics import (
26
+ register_metrics_callbacks,
27
+ Tg_global, Tg_by_node,
28
+ latency_series, glifogram_series,
29
+ glyph_top, glyph_dwell_stats, export_history,
30
+ )
31
31
  from .trace import register_trace
32
32
  from .program import play, seq, block, target, wait, THOL, TARGET, WAIT, ejemplo_canonico_basico
33
33
  from .cli import main as cli_main
@@ -37,8 +37,8 @@ from .types import NodeState
37
37
 
38
38
 
39
39
  __all__ = [
40
- "preparar_red",
41
- "step", "run", "set_delta_nfr_hook",
40
+ "preparar_red",
41
+ "step", "run", "set_delta_nfr_hook", "validate_canon",
42
42
 
43
43
  "attach_standard_observer", "coherencia_global", "orden_kuramoto",
44
44
  "GAMMA_REGISTRY", "eval_gamma", "kuramoto_R_psi",
@@ -51,9 +51,11 @@ __all__ = [
51
51
  "register_trace",
52
52
  "Tg_global", "Tg_by_node",
53
53
  "latency_series", "glifogram_series",
54
- "glyph_top", "glyph_dwell_stats",
54
+ "glyph_top", "glyph_dwell_stats",
55
+ "export_history",
55
56
  "play", "seq", "block", "target", "wait", "THOL", "TARGET", "WAIT",
56
57
  "cli_main", "build_graph", "get_preset", "NodeState",
57
58
  "ejemplo_canonico_basico",
58
59
  "__version__",
59
60
  ]
61
+
@@ -18,10 +18,11 @@ from .metrics import (
18
18
  latency_series,
19
19
  glifogram_series,
20
20
  glyph_top,
21
+ export_history,
21
22
  )
22
23
  from .trace import register_trace
23
24
  from .program import play, seq, block, wait, target
24
- from .dynamics import step, _update_history, default_glyph_selector, parametric_glyph_selector
25
+ from .dynamics import step, _update_history, default_glyph_selector, parametric_glyph_selector, validate_canon
25
26
  from .gamma import GAMMA_REGISTRY
26
27
  from .scenarios import build_graph
27
28
  from .presets import get_preset
@@ -32,6 +33,24 @@ def _save_json(path: str, data: Any) -> None:
32
33
  json.dump(data, f, ensure_ascii=False, indent=2)
33
34
 
34
35
 
36
+ def _str2bool(s: str) -> bool:
37
+ s = s.lower()
38
+ if s in {"true", "1", "yes", "y"}:
39
+ return True
40
+ if s in {"false", "0", "no", "n"}:
41
+ return False
42
+ raise argparse.ArgumentTypeError("expected true/false")
43
+
44
+
45
+ def _args_to_dict(args: argparse.Namespace, prefix: str) -> Dict[str, Any]:
46
+ out: Dict[str, Any] = {}
47
+ pref = prefix.replace(".", "_")
48
+ for k, v in vars(args).items():
49
+ if k.startswith(pref) and v is not None:
50
+ out[k[len(pref):]] = v
51
+ return out
52
+
53
+
35
54
  def _load_sequence(path: str) -> List[Any]:
36
55
  with open(path, "r", encoding="utf-8") as f:
37
56
  text = f.read()
@@ -73,9 +92,20 @@ def _attach_callbacks(G: nx.Graph) -> None:
73
92
  def cmd_run(args: argparse.Namespace) -> int:
74
93
  G = build_graph(n=args.nodes, topology=args.topology, seed=args.seed)
75
94
  _attach_callbacks(G)
76
- G.graph.setdefault("GRAMMAR_CANON", DEFAULTS["GRAMMAR_CANON"]).update({"enabled": bool(args.grammar_canon)})
95
+ validate_canon(G)
96
+ gcanon = dict(DEFAULTS["GRAMMAR_CANON"])
97
+ gcanon.update(_args_to_dict(args, prefix="grammar."))
98
+ if hasattr(args, "grammar_canon") and args.grammar_canon is not None:
99
+ gcanon["enabled"] = bool(args.grammar_canon)
100
+ G.graph.setdefault("GRAMMAR_CANON", {}).update(gcanon)
101
+ if args.glyph_hysteresis_window is not None:
102
+ G.graph["GLYPH_HYSTERESIS_WINDOW"] = int(args.glyph_hysteresis_window)
77
103
  G.graph["glyph_selector"] = default_glyph_selector if args.selector == "basic" else parametric_glyph_selector
78
- G.graph["GAMMA"] = {"type": args.gamma}
104
+ G.graph["GAMMA"] = {
105
+ "type": args.gamma_type,
106
+ "beta": args.gamma_beta,
107
+ "R0": args.gamma_R0,
108
+ }
79
109
 
80
110
  if args.preset:
81
111
  program = get_preset(args.preset)
@@ -87,6 +117,8 @@ def cmd_run(args: argparse.Namespace) -> int:
87
117
 
88
118
  if args.save_history:
89
119
  _save_json(args.save_history, G.graph.get("history", {}))
120
+ if args.export_history_base:
121
+ export_history(G, args.export_history_base, fmt=args.export_format)
90
122
 
91
123
  if args.summary:
92
124
  tg = Tg_global(G, normalize=True)
@@ -100,9 +132,20 @@ def cmd_run(args: argparse.Namespace) -> int:
100
132
  def cmd_sequence(args: argparse.Namespace) -> int:
101
133
  G = build_graph(n=args.nodes, topology=args.topology, seed=args.seed)
102
134
  _attach_callbacks(G)
103
- G.graph.setdefault("GRAMMAR_CANON", DEFAULTS["GRAMMAR_CANON"]).update({"enabled": bool(args.grammar_canon)})
135
+ validate_canon(G)
136
+ gcanon = dict(DEFAULTS["GRAMMAR_CANON"])
137
+ gcanon.update(_args_to_dict(args, prefix="grammar."))
138
+ if hasattr(args, "grammar_canon") and args.grammar_canon is not None:
139
+ gcanon["enabled"] = bool(args.grammar_canon)
140
+ G.graph.setdefault("GRAMMAR_CANON", {}).update(gcanon)
141
+ if args.glyph_hysteresis_window is not None:
142
+ G.graph["GLYPH_HYSTERESIS_WINDOW"] = int(args.glyph_hysteresis_window)
104
143
  G.graph["glyph_selector"] = default_glyph_selector if args.selector == "basic" else parametric_glyph_selector
105
- G.graph["GAMMA"] = {"type": args.gamma}
144
+ G.graph["GAMMA"] = {
145
+ "type": args.gamma_type,
146
+ "beta": args.gamma_beta,
147
+ "R0": args.gamma_R0,
148
+ }
106
149
 
107
150
  if args.preset:
108
151
  program = get_preset(args.preset)
@@ -115,15 +158,22 @@ def cmd_sequence(args: argparse.Namespace) -> int:
115
158
 
116
159
  if args.save_history:
117
160
  _save_json(args.save_history, G.graph.get("history", {}))
161
+ if args.export_history_base:
162
+ export_history(G, args.export_history_base, fmt=args.export_format)
118
163
  return 0
119
164
 
120
165
 
121
166
  def cmd_metrics(args: argparse.Namespace) -> int:
122
167
  G = build_graph(n=args.nodes, topology=args.topology, seed=args.seed)
123
168
  _attach_callbacks(G)
169
+ validate_canon(G)
124
170
  G.graph.setdefault("GRAMMAR_CANON", DEFAULTS["GRAMMAR_CANON"]).update({"enabled": bool(args.grammar_canon)})
125
171
  G.graph["glyph_selector"] = default_glyph_selector if args.selector == "basic" else parametric_glyph_selector
126
- G.graph["GAMMA"] = {"type": args.gamma}
172
+ G.graph["GAMMA"] = {
173
+ "type": args.gamma_type,
174
+ "beta": args.gamma_beta,
175
+ "R0": args.gamma_R0,
176
+ }
127
177
  for _ in range(int(args.steps or 200)):
128
178
  step(G)
129
179
 
@@ -156,10 +206,22 @@ def main(argv: Optional[List[str]] = None) -> int:
156
206
  p_run.add_argument("--seed", type=int, default=1)
157
207
  p_run.add_argument("--preset", type=str, default=None)
158
208
  p_run.add_argument("--save-history", dest="save_history", type=str, default=None)
209
+ p_run.add_argument("--export-history-base", dest="export_history_base", type=str, default=None)
210
+ p_run.add_argument("--export-format", dest="export_format", choices=["csv", "json"], default="json")
159
211
  p_run.add_argument("--summary", action="store_true")
160
212
  p_run.add_argument("--no-canon", dest="grammar_canon", action="store_false", default=True, help="Desactiva gramática canónica")
213
+ p_run.add_argument("--grammar.enabled", dest="grammar_enabled", type=_str2bool, default=None)
214
+ p_run.add_argument("--grammar.zhir_requires_oz_window", dest="grammar_zhir_requires_oz_window", type=int, default=None)
215
+ p_run.add_argument("--grammar.zhir_dnfr_min", dest="grammar_zhir_dnfr_min", type=float, default=None)
216
+ p_run.add_argument("--grammar.thol_min_len", dest="grammar_thol_min_len", type=int, default=None)
217
+ p_run.add_argument("--grammar.thol_max_len", dest="grammar_thol_max_len", type=int, default=None)
218
+ p_run.add_argument("--grammar.thol_close_dnfr", dest="grammar_thol_close_dnfr", type=float, default=None)
219
+ p_run.add_argument("--grammar.si_high", dest="grammar_si_high", type=float, default=None)
220
+ p_run.add_argument("--glyph.hysteresis_window", dest="glyph_hysteresis_window", type=int, default=None)
161
221
  p_run.add_argument("--selector", choices=["basic", "param"], default="basic")
162
- p_run.add_argument("--gamma", choices=list(GAMMA_REGISTRY.keys()), default="none")
222
+ p_run.add_argument("--gamma-type", choices=list(GAMMA_REGISTRY.keys()), default="none")
223
+ p_run.add_argument("--gamma-beta", type=float, default=0.0)
224
+ p_run.add_argument("--gamma-R0", type=float, default=0.0)
163
225
  p_run.set_defaults(func=cmd_run)
164
226
 
165
227
  p_seq = sub.add_parser("sequence", help="Ejecutar una secuencia (preset o YAML/JSON)")
@@ -169,6 +231,19 @@ def main(argv: Optional[List[str]] = None) -> int:
169
231
  p_seq.add_argument("--preset", type=str, default=None)
170
232
  p_seq.add_argument("--sequence-file", type=str, default=None)
171
233
  p_seq.add_argument("--save-history", dest="save_history", type=str, default=None)
234
+ p_seq.add_argument("--export-history-base", dest="export_history_base", type=str, default=None)
235
+ p_seq.add_argument("--export-format", dest="export_format", choices=["csv", "json"], default="json")
236
+ p_seq.add_argument("--gamma-type", choices=list(GAMMA_REGISTRY.keys()), default="none")
237
+ p_seq.add_argument("--gamma-beta", type=float, default=0.0)
238
+ p_seq.add_argument("--gamma-R0", type=float, default=0.0)
239
+ p_seq.add_argument("--grammar.enabled", dest="grammar_enabled", type=_str2bool, default=None)
240
+ p_seq.add_argument("--grammar.zhir_requires_oz_window", dest="grammar_zhir_requires_oz_window", type=int, default=None)
241
+ p_seq.add_argument("--grammar.zhir_dnfr_min", dest="grammar_zhir_dnfr_min", type=float, default=None)
242
+ p_seq.add_argument("--grammar.thol_min_len", dest="grammar_thol_min_len", type=int, default=None)
243
+ p_seq.add_argument("--grammar.thol_max_len", dest="grammar_thol_max_len", type=int, default=None)
244
+ p_seq.add_argument("--grammar.thol_close_dnfr", dest="grammar_thol_close_dnfr", type=float, default=None)
245
+ p_seq.add_argument("--grammar.si_high", dest="grammar_si_high", type=float, default=None)
246
+ p_seq.add_argument("--glyph.hysteresis_window", dest="glyph_hysteresis_window", type=int, default=None)
172
247
  p_seq.set_defaults(func=cmd_sequence)
173
248
 
174
249
  p_met = sub.add_parser("metrics", help="Correr breve y volcar métricas clave")
@@ -178,7 +253,9 @@ def main(argv: Optional[List[str]] = None) -> int:
178
253
  p_met.add_argument("--seed", type=int, default=1)
179
254
  p_met.add_argument("--no-canon", dest="grammar_canon", action="store_false", default=True, help="Desactiva gramática canónica")
180
255
  p_met.add_argument("--selector", choices=["basic", "param"], default="basic")
181
- p_met.add_argument("--gamma", choices=list(GAMMA_REGISTRY.keys()), default="none")
256
+ p_met.add_argument("--gamma-type", choices=list(GAMMA_REGISTRY.keys()), default="none")
257
+ p_met.add_argument("--gamma-beta", type=float, default=0.0)
258
+ p_met.add_argument("--gamma-R0", type=float, default=0.0)
182
259
  p_met.add_argument("--save", type=str, default=None)
183
260
  p_met.set_defaults(func=cmd_metrics)
184
261
 
@@ -76,14 +76,19 @@ DEFAULTS: Dict[str, Any] = {
76
76
  "REMESH_COOLDOWN_VENTANA": 20, # pasos mínimos entre RE’MESH
77
77
  "REMESH_COOLDOWN_TS": 0.0, # cooldown adicional por tiempo simulado
78
78
  # Gating adicional basado en observadores (conmutador + ventana)
79
- "REMESH_REQUIRE_STABILITY": False, # si True, exige ventana de estabilidad multi-métrica
80
- "REMESH_STABILITY_WINDOW": 25, # tamaño de ventana para evaluar estabilidad
81
- "REMESH_MIN_PHASE_SYNC": 0.85, # media mínima de sincronía de fase en ventana
82
- "REMESH_MAX_GLYPH_DISR": 0.35, # media máxima de carga glífica disruptiva en ventana
83
- "REMESH_LOG_EVENTS": True, # guarda eventos y metadatos del RE’MESH
84
-
85
- # RE’MESH: memoria τ y mezcla α
86
- "REMESH_TAU": 8, # pasos hacia atrás
79
+ "REMESH_REQUIRE_STABILITY": False, # si True, exige ventana de estabilidad multi-métrica
80
+ "REMESH_STABILITY_WINDOW": 25, # tamaño de ventana para evaluar estabilidad
81
+ "REMESH_MIN_PHASE_SYNC": 0.85, # media mínima de sincronía de fase en ventana
82
+ "REMESH_MAX_GLYPH_DISR": 0.35, # media máxima de carga glífica disruptiva en ventana
83
+ "REMESH_MIN_SIGMA_MAG": 0.50, # magnitud mínima de σ en ventana
84
+ "REMESH_MIN_KURAMOTO_R": 0.80, # R de Kuramoto mínimo en ventana
85
+ "REMESH_MIN_SI_HI_FRAC": 0.50, # fracción mínima de nodos con Si alto
86
+ "REMESH_LOG_EVENTS": True, # guarda eventos y metadatos del RE’MESH
87
+
88
+ # RE’MESH: memoria τ y mezcla α (global/local)
89
+ "REMESH_TAU": 8, # compatibilidad: tau global por defecto
90
+ "REMESH_TAU_GLOBAL": 8, # pasos hacia atrás (escala global)
91
+ "REMESH_TAU_LOCAL": 4, # pasos hacia atrás (escala local)
87
92
  "REMESH_ALPHA": 0.5, # mezcla con pasado
88
93
  "REMESH_ALPHA_HARD": False, # si True ignora GLYPH_FACTORS['REMESH_alpha']
89
94
 
@@ -177,11 +182,7 @@ DEFAULTS.setdefault("GRAMMAR_CANON", {
177
182
 
178
183
  def attach_defaults(G, override: bool = False) -> None:
179
184
  """Escribe DEFAULTS en G.graph (sin sobreescribir si override=False)."""
180
- G.graph.setdefault("_tnfr_defaults_attached", False)
181
- for k, v in DEFAULTS.items():
182
- if override or k not in G.graph:
183
- G.graph[k] = v
184
- G.graph["_tnfr_defaults_attached"] = True
185
+ inject_defaults(G, DEFAULTS, override=override)
185
186
 
186
187
 
187
188
  def inject_defaults(G, defaults: Dict[str, Any] = DEFAULTS, override: bool = False) -> None:
@@ -210,7 +211,10 @@ def merge_overrides(G, **overrides) -> None:
210
211
  ALIAS_VF = ("νf", "nu_f", "nu-f", "nu", "freq", "frequency")
211
212
  ALIAS_THETA = ("θ", "theta", "fase", "phi", "phase")
212
213
  ALIAS_DNFR = ("ΔNFR", "delta_nfr", "dnfr")
213
- ALIAS_EPI = ("EPI", "psi", "PSI", "value")
214
- ALIAS_SI = ("Si", "sense_index", "S_i", "sense", "meaning_index")
215
- ALIAS_dEPI = ("dEPI_dt", "dpsi_dt", "dEPI", "velocity")
216
- ALIAS_D2EPI = ("d2EPI_dt2", "d2psi_dt2", "d2EPI", "accel")
214
+ ALIAS_EPI = ("EPI", "psi", "PSI", "value")
215
+ ALIAS_SI = ("Si", "sense_index", "S_i", "sense", "meaning_index")
216
+ ALIAS_dEPI = ("dEPI_dt", "dpsi_dt", "dEPI", "velocity")
217
+ ALIAS_D2EPI = ("d2EPI_dt2", "d2psi_dt2", "d2EPI", "accel")
218
+ ALIAS_dVF = ("dνf_dt", "dvf_dt", "dnu_dt", "dvf")
219
+ ALIAS_D2VF = ("d2νf_dt2", "d2vf_dt2", "d2nu_dt2", "B")
220
+ ALIAS_dSI = ("δSi", "delta_Si", "dSi")
@@ -19,7 +19,11 @@ import networkx as nx
19
19
  from .observers import sincronía_fase, carga_glifica, orden_kuramoto, sigma_vector
20
20
  from .operators import aplicar_remesh_si_estabilizacion_global
21
21
  from .grammar import select_and_apply_with_grammar
22
- from .constants import DEFAULTS, ALIAS_VF, ALIAS_THETA, ALIAS_DNFR, ALIAS_EPI, ALIAS_SI, ALIAS_dEPI, ALIAS_D2EPI
22
+ from .constants import (
23
+ DEFAULTS,
24
+ ALIAS_VF, ALIAS_THETA, ALIAS_DNFR, ALIAS_EPI, ALIAS_SI,
25
+ ALIAS_dEPI, ALIAS_D2EPI, ALIAS_dVF, ALIAS_D2VF, ALIAS_dSI,
26
+ )
23
27
  from .gamma import eval_gamma
24
28
  from .helpers import (
25
29
  clamp, clamp01, list_mean, phase_distance,
@@ -212,6 +216,17 @@ def aplicar_clamps_canonicos(nd: Dict[str, Any], G=None, node=None) -> None:
212
216
  _set_attr(nd, ALIAS_THETA, ((th + math.pi) % (2*math.pi) - math.pi))
213
217
 
214
218
 
219
+ def validate_canon(G) -> None:
220
+ """Aplica clamps canónicos a todos los nodos de ``G``.
221
+
222
+ Envuelve fase y restringe ``EPI`` y ``νf`` a los rangos en ``G.graph``.
223
+ Si ``VALIDATORS_STRICT`` está activo, registra alertas en ``history``.
224
+ """
225
+ for n in G.nodes():
226
+ aplicar_clamps_canonicos(G.nodes[n], G, n)
227
+ return G
228
+
229
+
215
230
  def coordinar_fase_global_vecinal(G, fuerza_global: float | None = None, fuerza_vecinal: float | None = None) -> None:
216
231
  """
217
232
  Ajusta fase con mezcla GLOBAL+VECINAL.
@@ -425,6 +440,19 @@ def parametric_glyph_selector(G, n) -> str:
425
440
  prev = list(hist)[-1]
426
441
  if isinstance(prev, str) and prev in ("I’L","O’Z","Z’HIR","T’HOL","NA’V","R’A"):
427
442
  return prev
443
+
444
+ # Penalización por falta de avance en σ/Si si se repite glifo
445
+ prev = None
446
+ hist_prev = nd.get("hist_glifos")
447
+ if hist_prev:
448
+ prev = list(hist_prev)[-1]
449
+ if prev == cand:
450
+ delta_si = _get_attr(nd, ALIAS_dSI, 0.0)
451
+ h = G.graph.get("history", {})
452
+ sig = h.get("sense_sigma_mag", [])
453
+ delta_sigma = sig[-1] - sig[-2] if len(sig) >= 2 else 0.0
454
+ if delta_si <= 0.0 and delta_sigma <= 0.0:
455
+ score -= 0.05
428
456
 
429
457
  # Override suave guiado por score (solo si NO cayó la histéresis arriba)
430
458
  # Regla: score>=0.66 inclina a I’L; score<=0.33 inclina a O’Z/Z’HIR
@@ -487,7 +515,9 @@ def step(G, *, dt: float | None = None, use_Si: bool = True, apply_glyphs: bool
487
515
  # 7) Observadores ligeros
488
516
  _update_history(G)
489
517
  # dynamics.py — dentro de step(), justo antes del punto 8)
490
- tau = int(G.graph.get("REMESH_TAU", DEFAULTS["REMESH_TAU"]))
518
+ tau_g = int(G.graph.get("REMESH_TAU_GLOBAL", G.graph.get("REMESH_TAU", DEFAULTS["REMESH_TAU_GLOBAL"])))
519
+ tau_l = int(G.graph.get("REMESH_TAU_LOCAL", G.graph.get("REMESH_TAU", DEFAULTS["REMESH_TAU_LOCAL"])))
520
+ tau = max(tau_g, tau_l)
491
521
  maxlen = max(2 * tau + 5, 64)
492
522
  epi_hist = G.graph.get("_epi_hist")
493
523
  if not isinstance(epi_hist, deque) or epi_hist.maxlen != maxlen:
@@ -528,11 +558,12 @@ def run(G, steps: int, *, dt: float | None = None, use_Si: bool = True, apply_gl
528
558
  # -------------------------
529
559
 
530
560
  def _update_history(G) -> None:
531
- hist = G.graph.setdefault("history", {
532
- "C_steps": [], "stable_frac": [],
533
- "phase_sync": [], "glyph_load_estab": [], "glyph_load_disr": [],
534
- "Si_mean": [], "Si_hi_frac": [], "Si_lo_frac": []
535
- })
561
+ hist = G.graph.setdefault("history", {})
562
+ for k in (
563
+ "C_steps", "stable_frac", "phase_sync", "glyph_load_estab", "glyph_load_disr",
564
+ "Si_mean", "Si_hi_frac", "Si_lo_frac", "delta_Si", "B"
565
+ ):
566
+ hist.setdefault(k, [])
536
567
 
537
568
  # Proxy de coherencia C(t)
538
569
  dnfr_mean = list_mean(abs(_get_attr(G.nodes[n], ALIAS_DNFR, 0.0)) for n in G.nodes())
@@ -552,11 +583,37 @@ def _update_history(G) -> None:
552
583
  eps_depi = float(G.graph.get("EPS_DEPI_STABLE", DEFAULTS["EPS_DEPI_STABLE"]))
553
584
  stables = 0
554
585
  total = max(1, G.number_of_nodes())
586
+ dt = float(G.graph.get("DT", DEFAULTS.get("DT", 1.0))) or 1.0
587
+ delta_si_acc = []
588
+ B_acc = []
555
589
  for n in G.nodes():
556
590
  nd = G.nodes[n]
557
591
  if abs(_get_attr(nd, ALIAS_DNFR, 0.0)) <= eps_dnfr and abs(_get_attr(nd, ALIAS_dEPI, 0.0)) <= eps_depi:
558
592
  stables += 1
593
+
594
+ # δSi por nodo
595
+ Si_curr = _get_attr(nd, ALIAS_SI, 0.0)
596
+ Si_prev = nd.get("_prev_Si", Si_curr)
597
+ dSi = Si_curr - Si_prev
598
+ nd["_prev_Si"] = Si_curr
599
+ _set_attr(nd, ALIAS_dSI, dSi)
600
+ delta_si_acc.append(dSi)
601
+
602
+ # Bifurcación B = ∂²νf/∂t²
603
+ vf_curr = _get_attr(nd, ALIAS_VF, 0.0)
604
+ vf_prev = nd.get("_prev_vf", vf_curr)
605
+ dvf_dt = (vf_curr - vf_prev) / dt
606
+ dvf_prev = nd.get("_prev_dvf", dvf_dt)
607
+ B = (dvf_dt - dvf_prev) / dt
608
+ nd["_prev_vf"] = vf_curr
609
+ nd["_prev_dvf"] = dvf_dt
610
+ _set_attr(nd, ALIAS_dVF, dvf_dt)
611
+ _set_attr(nd, ALIAS_D2VF, B)
612
+ B_acc.append(B)
613
+
559
614
  hist["stable_frac"].append(stables/total)
615
+ hist["delta_Si"].append(list_mean(delta_si_acc, 0.0))
616
+ hist["B"].append(list_mean(B_acc, 0.0))
560
617
  # --- nuevas series: sincronía de fase y carga glífica ---
561
618
  try:
562
619
  ps = sincronía_fase(G) # [0,1], más alto = más en fase
@@ -23,14 +23,7 @@ import math
23
23
  import cmath
24
24
 
25
25
  from .constants import ALIAS_THETA
26
-
27
-
28
- def _get_attr(nd: Dict[str, Any], aliases, default: float = 0.0) -> float:
29
- """Obtiene el primer atributo presente en nd según aliases."""
30
- for k in aliases:
31
- if k in nd:
32
- return nd[k]
33
- return default
26
+ from .helpers import _get_attr
34
27
 
35
28
 
36
29
  def kuramoto_R_psi(G) -> Tuple[float, float]:
@@ -83,10 +76,27 @@ def gamma_kuramoto_bandpass(G, node, t, cfg: Dict[str, Any]) -> float:
83
76
  return beta * R * (1.0 - R) * sgn
84
77
 
85
78
 
79
+ def gamma_kuramoto_tanh(G, node, t, cfg: Dict[str, Any]) -> float:
80
+ """Acoplamiento saturante tipo tanh para Γi(R).
81
+
82
+ Fórmula: Γ = β · tanh(k·(R - R0)) · cos(θ_i - ψ)
83
+ - β: ganancia del acoplamiento
84
+ - k: pendiente de la tanh (cuán rápido satura)
85
+ - R0: umbral de activación
86
+ """
87
+ beta = float(cfg.get("beta", 0.0))
88
+ k = float(cfg.get("k", 1.0))
89
+ R0 = float(cfg.get("R0", 0.0))
90
+ R, psi = kuramoto_R_psi(G)
91
+ th_i = _get_attr(G.nodes[node], ALIAS_THETA, 0.0)
92
+ return beta * math.tanh(k * (R - R0)) * math.cos(th_i - psi)
93
+
94
+
86
95
  GAMMA_REGISTRY = {
87
96
  "none": gamma_none,
88
97
  "kuramoto_linear": gamma_kuramoto_linear,
89
98
  "kuramoto_bandpass": gamma_kuramoto_bandpass,
99
+ "kuramoto_tanh": gamma_kuramoto_tanh,
90
100
  }
91
101
 
92
102
 
@@ -124,6 +124,27 @@ def reciente_glifo(nd: Dict[str, Any], glifo: str, ventana: int) -> bool:
124
124
  break
125
125
  return False
126
126
 
127
+ # -------------------------
128
+ # Utilidades de historial global
129
+ # -------------------------
130
+
131
+ def ensure_history(G) -> Dict[str, Any]:
132
+ """Garantiza G.graph['history'] y la devuelve."""
133
+ if "history" not in G.graph:
134
+ G.graph["history"] = {}
135
+ return G.graph["history"]
136
+
137
+
138
+ def last_glifo(nd: Dict[str, Any]) -> str | None:
139
+ """Retorna el glifo más reciente del nodo o ``None``."""
140
+ hist = nd.get("hist_glifos")
141
+ if not hist:
142
+ return None
143
+ try:
144
+ return list(hist)[-1]
145
+ except Exception:
146
+ return None
147
+
127
148
  # -------------------------
128
149
  # Callbacks Γ(R)
129
150
  # -------------------------
@@ -2,9 +2,11 @@ from __future__ import annotations
2
2
  from typing import Dict, Any, List, Tuple
3
3
  from collections import defaultdict, Counter
4
4
  import statistics
5
+ import csv
6
+ import json
5
7
 
6
8
  from .constants import DEFAULTS
7
- from .helpers import _get_attr, clamp01, register_callback
9
+ from .helpers import register_callback, ensure_history, last_glifo
8
10
  from .sense import GLYPHS_CANONICAL
9
11
 
10
12
  # -------------
@@ -16,25 +18,12 @@ DEFAULTS.setdefault("METRICS", {
16
18
  "normalize_series": False # glifograma normalizado a fracción por paso
17
19
  })
18
20
 
21
+
22
+
19
23
  # -------------
20
24
  # Utilidades internas
21
25
  # -------------
22
26
 
23
- def _ensure_history(G):
24
- if "history" not in G.graph:
25
- G.graph["history"] = {}
26
- return G.graph["history"]
27
-
28
-
29
- def _last_glifo(nd: Dict[str, Any]) -> str | None:
30
- hist = nd.get("hist_glifos")
31
- if not hist:
32
- return None
33
- try:
34
- return list(hist)[-1]
35
- except Exception:
36
- return None
37
-
38
27
 
39
28
  # -------------
40
29
  # Estado nodal para Tg
@@ -44,10 +33,7 @@ def _tg_state(nd: Dict[str, Any]) -> Dict[str, Any]:
44
33
  """Estructura interna por nodo para acumular tiempos de corrida por glifo.
45
34
  Campos: curr (glifo actual), run (tiempo acumulado en el glifo actual)
46
35
  """
47
- st = nd.setdefault("_Tg", {"curr": None, "run": 0.0})
48
- st.setdefault("curr", None)
49
- st.setdefault("run", 0.0)
50
- return st
36
+ return nd.setdefault("_Tg", {"curr": None, "run": 0.0})
51
37
 
52
38
 
53
39
  # -------------
@@ -66,7 +52,7 @@ def _metrics_step(G, *args, **kwargs):
66
52
  if not G.graph.get("METRICS", DEFAULTS.get("METRICS", {})).get("enabled", True):
67
53
  return
68
54
 
69
- hist = _ensure_history(G)
55
+ hist = ensure_history(G)
70
56
  dt = float(G.graph.get("DT", 1.0))
71
57
  t = float(G.graph.get("_t", 0.0))
72
58
 
@@ -84,7 +70,7 @@ def _metrics_step(G, *args, **kwargs):
84
70
 
85
71
  for n in G.nodes():
86
72
  nd = G.nodes[n]
87
- g = _last_glifo(nd)
73
+ g = last_glifo(nd)
88
74
  if not g:
89
75
  continue
90
76
 
@@ -143,7 +129,7 @@ def register_metrics_callbacks(G) -> None:
143
129
 
144
130
  def Tg_global(G, normalize: bool = True) -> Dict[str, float]:
145
131
  """Tiempo glífico total por clase. Si normalize=True, devuelve fracciones del total."""
146
- hist = _ensure_history(G)
132
+ hist = ensure_history(G)
147
133
  tg_total: Dict[str, float] = hist.get("Tg_total", {})
148
134
  total = sum(tg_total.values()) or 1.0
149
135
  if normalize:
@@ -153,7 +139,7 @@ def Tg_global(G, normalize: bool = True) -> Dict[str, float]:
153
139
 
154
140
  def Tg_by_node(G, n, normalize: bool = False) -> Dict[str, float | List[float]]:
155
141
  """Resumen por nodo: si normalize, devuelve medias por glifo; si no, lista de corridas."""
156
- hist = _ensure_history(G)
142
+ hist = ensure_history(G)
157
143
  rec = hist.get("Tg_by_node", {}).get(n, {})
158
144
  if not normalize:
159
145
  # convertir default dict → list para serializar
@@ -167,7 +153,7 @@ def Tg_by_node(G, n, normalize: bool = False) -> Dict[str, float | List[float]]:
167
153
 
168
154
 
169
155
  def latency_series(G) -> Dict[str, List[float]]:
170
- hist = _ensure_history(G)
156
+ hist = ensure_history(G)
171
157
  xs = hist.get("latency_index", [])
172
158
  return {
173
159
  "t": [float(x.get("t", i)) for i, x in enumerate(xs)],
@@ -176,7 +162,7 @@ def latency_series(G) -> Dict[str, List[float]]:
176
162
 
177
163
 
178
164
  def glifogram_series(G) -> Dict[str, List[float]]:
179
- hist = _ensure_history(G)
165
+ hist = ensure_history(G)
180
166
  xs = hist.get("glifogram", [])
181
167
  if not xs:
182
168
  return {"t": []}
@@ -194,7 +180,7 @@ def glyph_top(G, k: int = 3) -> List[Tuple[str, float]]:
194
180
 
195
181
  def glyph_dwell_stats(G, n) -> Dict[str, Dict[str, float]]:
196
182
  """Estadísticos por nodo: mean/median/max de corridas por glifo."""
197
- hist = _ensure_history(G)
183
+ hist = ensure_history(G)
198
184
  rec = hist.get("Tg_by_node", {}).get(n, {})
199
185
  out = {}
200
186
  for g in GLYPHS_CANONICAL:
@@ -209,3 +195,40 @@ def glyph_dwell_stats(G, n) -> Dict[str, Dict[str, float]]:
209
195
  "count": int(len(runs)),
210
196
  }
211
197
  return out
198
+
199
+
200
+ # -----------------------------
201
+ # Export history to CSV/JSON
202
+ # -----------------------------
203
+
204
def export_history(G, base_path: str, fmt: str = "csv") -> None:
    """Dump the glyphogram and the sigma(t) sense trace to compact files.

    Parameters
    ----------
    G : graph whose ``G.graph["history"]`` holds the recorded series.
    base_path : path prefix; suffixes (``_glifogram.csv``, ``_sigma.csv`` or
        ``.json``) are appended to it.
    fmt : ``"csv"`` (case-insensitive) writes two CSV files; any other value
        writes a single combined JSON file.
    """
    hist = ensure_history(G)
    glifo = glifogram_series(G)
    sigma_mag = hist.get("sense_sigma_mag", [])
    sigma = {
        "t": list(range(len(sigma_mag))),
        "sigma_x": hist.get("sense_sigma_x", []),
        "sigma_y": hist.get("sense_sigma_y", []),
        "mag": sigma_mag,
        "angle": hist.get("sense_sigma_angle", []),
    }
    fmt = fmt.lower()
    if fmt == "csv":
        # encoding fixed to UTF-8 so glyph names serialize identically everywhere
        with open(base_path + "_glifogram.csv", "w", newline="", encoding="utf-8") as f:
            writer = csv.writer(f)
            writer.writerow(["t", *GLYPHS_CANONICAL])
            ts = glifo.get("t", [])
            # a glyph absent from the series contributes a zero column
            default_col = [0] * len(ts)
            for i, t in enumerate(ts):
                writer.writerow([t] + [glifo.get(g, default_col)[i] for g in GLYPHS_CANONICAL])
        with open(base_path + "_sigma.csv", "w", newline="", encoding="utf-8") as f:
            writer = csv.writer(f)
            writer.writerow(["t", "x", "y", "mag", "angle"])
            # zip() truncates to the shortest series; indexing by i raised
            # IndexError when the sigma histories had unequal lengths
            rows = zip(sigma["t"], sigma["sigma_x"], sigma["sigma_y"],
                       sigma["mag"], sigma["angle"])
            for row in rows:
                writer.writerow(row)
    else:
        data = {"glifogram": glifo, "sigma": sigma}
        with open(base_path + ".json", "w", encoding="utf-8") as f:
            json.dump(data, f)
@@ -190,31 +190,31 @@ def aplicar_glifo(G, n, glifo: str, *, window: Optional[int] = None) -> None:
190
190
  # -------------------------
191
191
 
192
192
def _remesh_alpha_info(G):
    """Resolve the RE'MESH mixing factor alpha and report its origin.

    Precedence: hard override (``REMESH_ALPHA_HARD``) pins alpha to the
    graph-level/default ``REMESH_ALPHA``; otherwise ``GLYPH_FACTORS`` wins,
    then ``G.graph["REMESH_ALPHA"]``, then ``DEFAULTS``.

    Returns
    -------
    tuple[float, str]
        ``(alpha, source_tag)`` where the tag names the winning source.
    """
    graph_cfg = G.graph
    hard = bool(graph_cfg.get("REMESH_ALPHA_HARD",
                              DEFAULTS.get("REMESH_ALPHA_HARD", False)))
    if hard:
        # Hard mode deliberately ignores GLYPH_FACTORS.
        alpha = float(graph_cfg.get("REMESH_ALPHA", DEFAULTS["REMESH_ALPHA"]))
        return alpha, "REMESH_ALPHA"
    factors = graph_cfg.get("GLYPH_FACTORS", DEFAULTS.get("GLYPH_FACTORS", {}))
    if "REMESH_alpha" in factors:
        return float(factors["REMESH_alpha"]), "GLYPH_FACTORS.REMESH_alpha"
    if "REMESH_ALPHA" in graph_cfg:
        return float(graph_cfg["REMESH_ALPHA"]), "REMESH_ALPHA"
    return float(DEFAULTS["REMESH_ALPHA"]), "DEFAULTS.REMESH_ALPHA"
203
203
 
204
204
 
205
205
  def aplicar_remesh_red(G) -> None:
206
- """
207
- RE’MESH a escala de red usando _epi_hist capturado en dynamics.step.
208
- Loguea meta con alpha/tau/step + topo_hash y checksums/medias de EPI antes/después.
209
- Precedencia de alpha: GLYPH_FACTORS → G.graph → DEFAULTS.
210
- """
211
- tau = int(G.graph.get("REMESH_TAU", DEFAULTS["REMESH_TAU"]))
206
+ """RE’MESH a escala de red usando _epi_hist con memoria multi-escala."""
207
+ tau_g = int(G.graph.get("REMESH_TAU_GLOBAL", G.graph.get("REMESH_TAU", DEFAULTS["REMESH_TAU_GLOBAL"])))
208
+ tau_l = int(G.graph.get("REMESH_TAU_LOCAL", G.graph.get("REMESH_TAU", DEFAULTS["REMESH_TAU_LOCAL"])))
209
+ tau_req = max(tau_g, tau_l)
212
210
  alpha, alpha_src = _remesh_alpha_info(G)
211
+ G.graph["_REMESH_ALPHA_SRC"] = alpha_src
213
212
  hist = G.graph.get("_epi_hist", deque())
214
- if len(hist) < tau + 1:
213
+ if len(hist) < tau_req + 1:
215
214
  return
216
215
 
217
- past = hist[-(tau + 1)]
216
+ past_g = hist[-(tau_g + 1)]
217
+ past_l = hist[-(tau_l + 1)]
218
218
 
219
219
  # --- Topología + snapshot EPI (ANTES) ---
220
220
  try:
@@ -239,8 +239,11 @@ def aplicar_remesh_red(G) -> None:
239
239
  for n in G.nodes():
240
240
  nd = G.nodes[n]
241
241
  epi_now = _get_attr(nd, ALIAS_EPI, 0.0)
242
- epi_old = float(past.get(n, epi_now))
243
- _set_attr(nd, ALIAS_EPI, (1 - alpha) * epi_now + alpha * epi_old)
242
+ epi_old_l = float(past_l.get(n, epi_now))
243
+ epi_old_g = float(past_g.get(n, epi_now))
244
+ mixed = (1 - alpha) * epi_now + alpha * epi_old_l
245
+ mixed = (1 - alpha) * mixed + alpha * epi_old_g
246
+ _set_attr(nd, ALIAS_EPI, mixed)
244
247
 
245
248
  # --- Snapshot EPI (DESPUÉS) ---
246
249
  epi_mean_after = list_mean(_get_attr(G.nodes[n], ALIAS_EPI, 0.0) for n in G.nodes())
@@ -253,7 +256,8 @@ def aplicar_remesh_red(G) -> None:
253
256
  meta = {
254
257
  "alpha": alpha,
255
258
  "alpha_source": alpha_src,
256
- "tau": tau,
259
+ "tau_global": tau_g,
260
+ "tau_local": tau_l,
257
261
  "step": step_idx,
258
262
  # firmas
259
263
  "topo_hash": topo_hash,
@@ -289,6 +293,9 @@ def aplicar_remesh_si_estabilizacion_global(G, pasos_estables_consecutivos: Opti
289
293
  req_extra = bool(G.graph.get("REMESH_REQUIRE_STABILITY", DEFAULTS["REMESH_REQUIRE_STABILITY"]))
290
294
  min_sync = float(G.graph.get("REMESH_MIN_PHASE_SYNC", DEFAULTS["REMESH_MIN_PHASE_SYNC"]))
291
295
  max_disr = float(G.graph.get("REMESH_MAX_GLYPH_DISR", DEFAULTS["REMESH_MAX_GLYPH_DISR"]))
296
+ min_sigma = float(G.graph.get("REMESH_MIN_SIGMA_MAG", DEFAULTS["REMESH_MIN_SIGMA_MAG"]))
297
+ min_R = float(G.graph.get("REMESH_MIN_KURAMOTO_R", DEFAULTS["REMESH_MIN_KURAMOTO_R"]))
298
+ min_sihi = float(G.graph.get("REMESH_MIN_SI_HI_FRAC", DEFAULTS["REMESH_MIN_SI_HI_FRAC"]))
292
299
 
293
300
  hist = G.graph.setdefault("history", {"stable_frac": []})
294
301
  sf = hist.get("stable_frac", [])
@@ -311,7 +318,22 @@ def aplicar_remesh_si_estabilizacion_global(G, pasos_estables_consecutivos: Opti
311
318
  if "glyph_load_disr" in hist and len(hist["glyph_load_disr"]) >= w_estab:
312
319
  win_disr = hist["glyph_load_disr"][-w_estab:]
313
320
  disr_ok = (sum(win_disr)/len(win_disr)) <= max_disr
314
- if not (ps_ok and disr_ok):
321
+ # magnitud de sigma (mayor mejor)
322
+ sig_ok = True
323
+ if "sense_sigma_mag" in hist and len(hist["sense_sigma_mag"]) >= w_estab:
324
+ win_sig = hist["sense_sigma_mag"][-w_estab:]
325
+ sig_ok = (sum(win_sig)/len(win_sig)) >= min_sigma
326
+ # orden de Kuramoto R (mayor mejor)
327
+ R_ok = True
328
+ if "kuramoto_R" in hist and len(hist["kuramoto_R"]) >= w_estab:
329
+ win_R = hist["kuramoto_R"][-w_estab:]
330
+ R_ok = (sum(win_R)/len(win_R)) >= min_R
331
+ # fracción de nodos con Si alto (mayor mejor)
332
+ sihi_ok = True
333
+ if "Si_hi_frac" in hist and len(hist["Si_hi_frac"]) >= w_estab:
334
+ win_sihi = hist["Si_hi_frac"][-w_estab:]
335
+ sihi_ok = (sum(win_sihi)/len(win_sihi)) >= min_sihi
336
+ if not (ps_ok and disr_ok and sig_ok and R_ok and sihi_ok):
315
337
  return
316
338
  # 3) Cooldown
317
339
  last = G.graph.get("_last_remesh_step", -10**9)
@@ -16,6 +16,9 @@ _PRESETS = {
16
16
  "SH’A",
17
17
  ),
18
18
  "ejemplo_canonico": ejemplo_canonico_basico(),
19
+ # Topologías fractales: expansión/contracción modular
20
+ "fractal_expand": seq(block("T’HOL", "VA’L", "U’M", repeat=2, close="NU’L"), "R’A"),
21
+ "fractal_contract": seq(block("T’HOL", "NU’L", "U’M", repeat=2, close="SH’A"), "R’A"),
19
22
  }
20
23
 
21
24
 
@@ -17,12 +17,18 @@ def build_graph(n: int = 24, topology: str = "ring", seed: int | None = 1):
17
17
  else:
18
18
  G = nx.path_graph(n)
19
19
 
20
+ # Valores canónicos para inicialización
21
+ inject_defaults(G, DEFAULTS)
22
+ vf_min = float(G.graph.get("VF_MIN", DEFAULTS["VF_MIN"]))
23
+ vf_max = float(G.graph.get("VF_MAX", DEFAULTS["VF_MAX"]))
24
+ th_min = float(G.graph.get("INIT_THETA_MIN", DEFAULTS.get("INIT_THETA_MIN", -3.1416)))
25
+ th_max = float(G.graph.get("INIT_THETA_MAX", DEFAULTS.get("INIT_THETA_MAX", 3.1416)))
26
+
20
27
  for i in G.nodes():
21
28
  nd = G.nodes[i]
22
29
  nd.setdefault("EPI", rng.uniform(0.1, 0.3))
23
- nd.setdefault("νf", rng.uniform(0.8, 1.2))
24
- nd.setdefault("θ", rng.uniform(-3.1416, 3.1416))
30
+ nd.setdefault("νf", rng.uniform(vf_min, vf_max))
31
+ nd.setdefault("θ", rng.uniform(th_min, th_max))
25
32
  nd.setdefault("Si", rng.uniform(0.4, 0.7))
26
33
 
27
- inject_defaults(G, DEFAULTS)
28
34
  return G
@@ -4,7 +4,7 @@ import math
4
4
  from collections import Counter
5
5
 
6
6
  from .constants import DEFAULTS, ALIAS_SI, ALIAS_EPI
7
- from .helpers import _get_attr, clamp01, register_callback
7
+ from .helpers import _get_attr, clamp01, register_callback, ensure_history, last_glifo
8
8
 
9
9
  # -------------------------
10
10
  # Canon: orden circular de glifos y ángulos
@@ -60,23 +60,14 @@ def _weight(G, n, mode: str) -> float:
60
60
  return 1.0
61
61
 
62
62
 
63
- def _last_glifo(nd: Dict[str, Any]) -> str | None:
64
- hist = nd.get("hist_glifos")
65
- if not hist:
66
- return None
67
- try:
68
- return list(hist)[-1]
69
- except Exception:
70
- return None
71
-
72
-
63
+
73
64
  # -------------------------
74
65
  # σ por nodo y σ global
75
66
  # -------------------------
76
67
 
77
68
  def sigma_vector_node(G, n, weight_mode: str | None = None) -> Dict[str, float] | None:
78
69
  nd = G.nodes[n]
79
- g = _last_glifo(nd)
70
+ g = last_glifo(nd)
80
71
  if g is None:
81
72
  return None
82
73
  w = _weight(G, n, weight_mode or G.graph.get("SIGMA", DEFAULTS["SIGMA"]).get("weight", "Si"))
@@ -120,17 +111,11 @@ def sigma_vector_global(G, weight_mode: str | None = None) -> Dict[str, float]:
120
111
  # Historia / series
121
112
  # -------------------------
122
113
 
123
- def _ensure_history(G):
124
- if "history" not in G.graph:
125
- G.graph["history"] = {}
126
- return G.graph["history"]
127
-
128
-
129
114
  def push_sigma_snapshot(G, t: float | None = None) -> None:
130
115
  cfg = G.graph.get("SIGMA", DEFAULTS["SIGMA"])
131
116
  if not cfg.get("enabled", True):
132
117
  return
133
- hist = _ensure_history(G)
118
+ hist = ensure_history(G)
134
119
  key = cfg.get("history_key", "sigma_global")
135
120
 
136
121
  # Global
@@ -153,7 +138,7 @@ def push_sigma_snapshot(G, t: float | None = None) -> None:
153
138
  # Conteo de glifos por paso (útil para rosa glífica)
154
139
  counts = Counter()
155
140
  for n in G.nodes():
156
- g = _last_glifo(G.nodes[n])
141
+ g = last_glifo(G.nodes[n])
157
142
  if g:
158
143
  counts[g] += 1
159
144
  hist.setdefault("sigma_counts", []).append({"t": sv["t"], **counts})
@@ -163,7 +148,7 @@ def push_sigma_snapshot(G, t: float | None = None) -> None:
163
148
  per = hist.setdefault("sigma_per_node", {})
164
149
  for n in G.nodes():
165
150
  nd = G.nodes[n]
166
- g = _last_glifo(nd)
151
+ g = last_glifo(nd)
167
152
  if not g:
168
153
  continue
169
154
  a = glyph_angle(g)
@@ -3,7 +3,7 @@ from typing import Any, Dict, List, Optional
3
3
  from collections import Counter
4
4
 
5
5
  from .constants import DEFAULTS
6
- from .helpers import register_callback
6
+ from .helpers import register_callback, ensure_history, last_glifo
7
7
 
8
8
  try:
9
9
  from .gamma import kuramoto_R_psi
@@ -30,22 +30,6 @@ DEFAULTS.setdefault("TRACE", {
30
30
  # Helpers
31
31
  # -------------------------
32
32
 
33
- def _ensure_history(G):
34
- if "history" not in G.graph:
35
- G.graph["history"] = {}
36
- return G.graph["history"]
37
-
38
-
39
- def _last_glifo(nd: Dict[str, Any]) -> str | None:
40
- h = nd.get("hist_glifos")
41
- if not h:
42
- return None
43
- try:
44
- return list(h)[-1]
45
- except Exception:
46
- return None
47
-
48
-
49
33
  # -------------------------
50
34
  # Snapshots
51
35
  # -------------------------
@@ -55,7 +39,7 @@ def _trace_before(G, *args, **kwargs):
55
39
  return
56
40
  cfg = G.graph.get("TRACE", DEFAULTS["TRACE"])
57
41
  capture: List[str] = list(cfg.get("capture", []))
58
- hist = _ensure_history(G)
42
+ hist = ensure_history(G)
59
43
  key = cfg.get("history_key", "trace_meta")
60
44
 
61
45
  meta: Dict[str, Any] = {"t": float(G.graph.get("_t", 0.0)), "phase": "before"}
@@ -103,7 +87,7 @@ def _trace_after(G, *args, **kwargs):
103
87
  return
104
88
  cfg = G.graph.get("TRACE", DEFAULTS["TRACE"])
105
89
  capture: List[str] = list(cfg.get("capture", []))
106
- hist = _ensure_history(G)
90
+ hist = ensure_history(G)
107
91
  key = cfg.get("history_key", "trace_meta")
108
92
 
109
93
  meta: Dict[str, Any] = {"t": float(G.graph.get("_t", 0.0)), "phase": "after"}
@@ -119,7 +103,7 @@ def _trace_after(G, *args, **kwargs):
119
103
  if "glifo_counts" in capture:
120
104
  cnt = Counter()
121
105
  for n in G.nodes():
122
- g = _last_glifo(G.nodes[n])
106
+ g = last_glifo(G.nodes[n])
123
107
  if g:
124
108
  cnt[g] += 1
125
109
  meta["glifos"] = dict(cnt)
@@ -1,12 +1,12 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: tnfr
3
- Version: 4.1.0
4
- Summary: TNFR canónica: dinámica glífica modular sobre redes.
3
+ Version: 4.3.0
4
+ Summary: Canonical TNFR: modular glyph-based dynamics on networks.
5
5
  Author: fmg
6
6
  License: MIT
7
7
  Project-URL: Homepage, https://pypi.org/project/tnfr/
8
8
  Project-URL: Repository, https://github.com/fermga/Teoria-de-la-naturaleza-fractal-resonante-TNFR-
9
- Keywords: TNFR,fractal resonante,resonancia,glifos,networkx,dinámica,coherencia,EPI,Kuramoto
9
+ Keywords: TNFR,resonant fractal,resonance,glyphs,networkx,dynamics,coherence,EPI,Kuramoto
10
10
  Classifier: Programming Language :: Python :: 3
11
11
  Classifier: Programming Language :: Python :: 3 :: Only
12
12
  Classifier: Programming Language :: Python :: 3.9
@@ -25,7 +25,13 @@ License-File: LICENSE.md
25
25
  Requires-Dist: networkx>=2.6
26
26
  Dynamic: license-file
27
27
 
28
- # General Project Structure
28
+ # TNFR Canonical Glyph-Based Dynamics
29
+
30
+ Reference implementation of the Resonant Fractal Nature Theory (TNFR).
31
+ It models glyph-driven dynamics on NetworkX graphs, providing a modular
32
+ engine to simulate coherent reorganization processes.
33
+
34
+ ## General Project Structure
29
35
 
30
36
  * **Package entry point.** `__init__.py` registers modules under short names to avoid circular imports and exposes the public API: `preparar_red`, `step`, `run`, and observation utilities.
31
37
 
@@ -25,11 +25,13 @@ src/tnfr.egg-info/dependency_links.txt
25
25
  src/tnfr.egg-info/entry_points.txt
26
26
  src/tnfr.egg-info/requires.txt
27
27
  src/tnfr.egg-info/top_level.txt
28
+ tests/test_canon.py
28
29
  tests/test_cli_sanity.py
29
30
  tests/test_edge_cases.py
30
31
  tests/test_gamma.py
31
32
  tests/test_grammar.py
32
33
  tests/test_history.py
34
+ tests/test_history_series.py
33
35
  tests/test_invariants.py
34
36
  tests/test_program.py
35
37
  tests/test_remesh.py
@@ -0,0 +1,30 @@
1
+ from tnfr.scenarios import build_graph
2
+ from tnfr.dynamics import validate_canon
3
+
4
+
5
+ def test_build_graph_vf_within_limits():
6
+ G = build_graph(n=10, topology="ring", seed=42)
7
+ vf_min = G.graph["VF_MIN"]
8
+ vf_max = G.graph["VF_MAX"]
9
+ for n in G.nodes():
10
+ vf = G.nodes[n]["νf"]
11
+ assert vf_min <= vf <= vf_max
12
+
13
+
14
+ def test_validate_canon_clamps():
15
+ G = build_graph(n=5, topology="ring", seed=1)
16
+ for n in G.nodes():
17
+ nd = G.nodes[n]
18
+ nd["νf"] = 2.0
19
+ nd["EPI"] = 2.0
20
+ nd["θ"] = 5.0
21
+ validate_canon(G)
22
+ vf_min = G.graph["VF_MIN"]
23
+ vf_max = G.graph["VF_MAX"]
24
+ epi_min = G.graph["EPI_MIN"]
25
+ epi_max = G.graph["EPI_MAX"]
26
+ for n in G.nodes():
27
+ nd = G.nodes[n]
28
+ assert vf_min <= nd["νf"] <= vf_max
29
+ assert epi_min <= nd["EPI"] <= epi_max
30
+ assert -3.1416 <= nd["θ"] <= 3.1416
@@ -1,5 +1,6 @@
1
1
  import pytest
2
2
  import networkx as nx
3
+ import pytest
3
4
 
4
5
  from tnfr.dynamics import default_compute_delta_nfr, update_epi_via_nodal_equation
5
6
 
@@ -0,0 +1,27 @@
1
+ import networkx as nx
2
+
3
+ from tnfr.constants import attach_defaults
4
+ from tnfr.dynamics import step
5
+ from tnfr.gamma import GAMMA_REGISTRY
6
+
7
+
8
def test_history_delta_si_and_B():
    """Two simulation steps must populate the delta_Si and B history series."""
    G = nx.Graph()
    G.add_node(0, EPI=0.0, νf=0.5, θ=0.0)
    attach_defaults(G)
    for _ in range(2):
        step(G, apply_glyphs=False)
    hist = G.graph.get("history", {})
    for series in ("delta_Si", "B"):
        assert series in hist
        assert len(hist[series]) >= 2
17
+
18
+
19
def test_gamma_kuramoto_tanh_registry():
    """The kuramoto_tanh Γ coupling is bounded in magnitude by its beta."""
    G = nx.Graph()
    G.add_nodes_from([0, 1])
    attach_defaults(G)
    # Fully synchronized phases.
    G.nodes[0]["θ"] = 0.0
    G.nodes[1]["θ"] = 0.0
    cfg = {"type": "kuramoto_tanh", "beta": 0.5, "k": 2.0, "R0": 0.0}
    value = GAMMA_REGISTRY["kuramoto_tanh"](G, 0, 0.0, cfg)
    assert abs(value) <= cfg["beta"]
@@ -2,6 +2,11 @@ from __future__ import annotations
2
2
  import math
3
3
  import pytest
4
4
 
5
+ from tnfr.constants import inject_defaults, DEFAULTS
6
+ from tnfr.scenarios import build_graph
7
+ import math
8
+ import pytest
9
+
5
10
  from tnfr.constants import inject_defaults, DEFAULTS
6
11
  from tnfr.scenarios import build_graph
7
12
  from tnfr.dynamics import step, _update_history
@@ -1,5 +1,6 @@
1
1
  import networkx as nx
2
2
  from collections import deque
3
+ import networkx as nx
3
4
 
4
5
  from tnfr.constants import attach_defaults
5
6
  from tnfr.operators import aplicar_remesh_si_estabilizacion_global
@@ -27,3 +28,21 @@ def test_aplicar_remesh_usa_parametro_personalizado():
27
28
  aplicar_remesh_si_estabilizacion_global(G, pasos_estables_consecutivos=3)
28
29
  assert G.graph["_last_remesh_step"] == len(hist["stable_frac"])
29
30
 
31
+
32
def test_remesh_alpha_hard_ignores_glyph_factor():
    """With REMESH_ALPHA_HARD set, the graph-level alpha wins over GLYPH_FACTORS."""
    G = nx.Graph()
    G.add_node(0)
    attach_defaults(G)
    history = G.graph.setdefault("history", {})
    history["stable_frac"] = [1.0, 1.0, 1.0]
    tau = G.graph["REMESH_TAU"]
    maxlen = max(2 * tau + 5, 64)
    snapshots = [{0: 0.0} for _ in range(tau + 1)]
    G.graph["_epi_hist"] = deque(snapshots, maxlen=maxlen)
    # Conflicting alpha sources: the hard flag must pick 0.7, not 0.1.
    G.graph["REMESH_ALPHA"] = 0.7
    G.graph["REMESH_ALPHA_HARD"] = True
    G.graph["GLYPH_FACTORS"]["REMESH_alpha"] = 0.1
    aplicar_remesh_si_estabilizacion_global(G, pasos_estables_consecutivos=3)
    meta = G.graph.get("_REMESH_META", {})
    assert meta.get("alpha") == 0.7
    assert G.graph.get("_REMESH_ALPHA_SRC") == "REMESH_ALPHA"
48
+
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes