tnfr 3.5.0__py3-none-any.whl → 4.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

tnfr/helpers.py CHANGED
@@ -4,7 +4,7 @@ helpers.py — TNFR canónica
 Utilidades transversales + cálculo de Índice de sentido (Si).
 """
 from __future__ import annotations
-from typing import Iterable, Dict, Any, Tuple, List
+from typing import Iterable, Dict, Any
 import math
 from collections import deque
 from statistics import fmean, StatisticsError
@@ -21,19 +21,23 @@ from .constants import DEFAULTS, ALIAS_VF, ALIAS_THETA, ALIAS_DNFR, ALIAS_EPI, A
 # -------------------------

 def clamp(x: float, a: float, b: float) -> float:
+    """Constriñe ``x`` al intervalo cerrado [a, b]."""
     return a if x < a else b if x > b else x


 def clamp_abs(x: float, m: float) -> float:
+    """Limita ``x`` al rango simétrico [-m, m] usando ``abs(m)``."""
     m = abs(m)
     return clamp(x, -m, m)


 def clamp01(x: float) -> float:
+    """Ataja ``x`` a la banda [0, 1]."""
     return clamp(x, 0.0, 1.0)


 def list_mean(xs: Iterable[float], default: float = 0.0) -> float:
+    """Promedio aritmético o ``default`` si ``xs`` está vacío."""
     try:
         return fmean(xs)
     except StatisticsError:
@@ -77,16 +81,14 @@ def _set_attr(d, aliases, value: float) -> None:
 # -------------------------

 def media_vecinal(G, n, aliases: Iterable[str], default: float = 0.0) -> float:
-    vals: List[float] = []
-    for v in G.neighbors(n):
-        vals.append(_get_attr(G.nodes[v], aliases, default))
+    """Media del atributo indicado por ``aliases`` en los vecinos de ``n``."""
+    vals = (_get_attr(G.nodes[v], aliases, default) for v in G.neighbors(n))
     return list_mean(vals, default)


 def fase_media(G, n) -> float:
     """Promedio circular de las fases de los vecinos."""
-    x = 0.0
-    y = 0.0
+    x = y = 0.0
     count = 0
     for v in G.neighbors(n):
         th = _get_attr(G.nodes[v], ALIAS_THETA, 0.0)
@@ -95,7 +97,7 @@ def fase_media(G, n) -> float:
         count += 1
     if count == 0:
         return _get_attr(G.nodes[n], ALIAS_THETA, 0.0)
-    return math.atan2(y / max(1, count), x / max(1, count))
+    return math.atan2(y / count, x / count)


 # -------------------------
@@ -103,16 +105,24 @@ def fase_media(G, n) -> float:
 # -------------------------

 def push_glifo(nd: Dict[str, Any], glifo: str, window: int) -> None:
+    """Añade ``glifo`` al historial del nodo con tamaño máximo ``window``."""
     hist = nd.setdefault("hist_glifos", deque(maxlen=window))
     hist.append(str(glifo))


 def reciente_glifo(nd: Dict[str, Any], glifo: str, ventana: int) -> bool:
+    """Indica si ``glifo`` apareció en las últimas ``ventana`` emisiones."""
     hist = nd.get("hist_glifos")
     if not hist:
         return False
-    last = list(hist)[-ventana:]
-    return str(glifo) in last
+    gl = str(glifo)
+    for g in reversed(hist):
+        if g == gl:
+            return True
+        ventana -= 1
+        if ventana <= 0:
+            break
+    return False

 # -------------------------
 # Callbacks Γ(R)
@@ -130,10 +140,26 @@ def _ensure_callbacks(G):
         cbs.setdefault(k, [])
     return cbs

-def register_callback(G, event: str, func):
-    """Registra un callback en G.graph['callbacks'][event]. Firma: func(G, ctx) -> None"""
+def register_callback(
+    G,
+    event: str | None = None,
+    func=None,
+    *,
+    when: str | None = None,
+    name: str | None = None,
+):
+    """Registra ``func`` como callback del ``event`` indicado.
+
+    Permite tanto la forma posicional ``register_callback(G, "after_step", fn)``
+    como la forma con palabras clave ``register_callback(G, when="after_step", func=fn)``.
+    El parámetro ``name`` se acepta por compatibilidad pero actualmente no se
+    utiliza.
+    """
+    event = event or when
     if event not in ("before_step", "after_step", "on_remesh"):
         raise ValueError(f"Evento desconocido: {event}")
+    if func is None:
+        raise TypeError("func es obligatorio")
     cbs = _ensure_callbacks(G)
     cbs[event].append(func)
     return func
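
The reworked `register_callback` accepts both calling conventions described in its new docstring. A minimal sketch of the two forms (the demo graph and `log_step` callback are illustrative, not part of the package):

```python
import networkx as nx
from tnfr.helpers import register_callback

G = nx.Graph()

def log_step(G, ctx=None):
    # Signature per the previous docstring: func(G, ctx) -> None
    print("step finished")

# Positional form (pre-4.0 style)
register_callback(G, "after_step", log_step)

# Keyword form used by the new modules; `name` is accepted but currently unused
register_callback(G, when="after_step", func=log_step, name="logger")
```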
tnfr/main.py CHANGED
@@ -1,7 +1,11 @@
 from __future__ import annotations
-import argparse, sys
-import networkx as nx
-from . import preparar_red, run, __version__
+import argparse, sys
+import networkx as nx
+from . import preparar_red, run, __version__
+from .constants import merge_overrides, attach_defaults
+from .sense import register_sigma_callback
+from .metrics import register_metrics_callbacks
+from .trace import register_trace

 def main(argv: list[str] | None = None) -> None:
     p = argparse.ArgumentParser(
@@ -15,13 +19,19 @@ def main(argv: list[str] | None = None) -> None:
     p.add_argument("--observer", action="store_true", help="adjunta observador estándar")
     args = p.parse_args(argv)

-    if args.version:
-        print(__version__)
-        return
-
-    G = nx.erdos_renyi_graph(args.n, args.p)
-    preparar_red(G, ATTACH_STD_OBSERVER=bool(args.observer))
-    run(G, args.steps)
+    if args.version:
+        print(__version__)
+        return
+
+    G = nx.erdos_renyi_graph(args.n, args.p)
+    preparar_red(G, ATTACH_STD_OBSERVER=bool(args.observer))
+    attach_defaults(G)
+    register_sigma_callback(G)
+    register_metrics_callbacks(G)
+    register_trace(G)
+    # Ejemplo: activar Γi(R) lineal con β=0.2 y R0=0.5
+    merge_overrides(G, GAMMA={"type": "kuramoto_linear", "beta": 0.2, "R0": 0.5})
+    run(G, args.steps)

     h = G.graph.get("history", {})
     C = h.get("C_steps", [])[-1] if h.get("C_steps") else None
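
For reference, the same wiring that the updated `main()` performs can be reproduced outside the CLI. A sketch assuming the module layout shown in this diff (graph size and step count are arbitrary):

```python
import networkx as nx
from tnfr import preparar_red, run
from tnfr.constants import attach_defaults, merge_overrides
from tnfr.sense import register_sigma_callback
from tnfr.metrics import register_metrics_callbacks
from tnfr.trace import register_trace

G = nx.erdos_renyi_graph(30, 0.15)
preparar_red(G, ATTACH_STD_OBSERVER=True)

# Same setup sequence as main(): defaults, sigma/metrics/trace callbacks,
# then a linear Gamma_i(R) override before running.
attach_defaults(G)
register_sigma_callback(G)
register_metrics_callbacks(G)
register_trace(G)
merge_overrides(G, GAMMA={"type": "kuramoto_linear", "beta": 0.2, "R0": 0.5})
run(G, 100)
```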
tnfr/metrics.py ADDED
@@ -0,0 +1,211 @@
+from __future__ import annotations
+from typing import Dict, Any, List, Tuple
+from collections import defaultdict, Counter
+import statistics
+
+from .constants import DEFAULTS
+from .helpers import _get_attr, clamp01, register_callback
+from .sense import GLYPHS_CANONICAL
+
+# -------------
+# DEFAULTS
+# -------------
+DEFAULTS.setdefault("METRICS", {
+    "enabled": True,
+    "save_by_node": True,       # guarda Tg por nodo (más pesado)
+    "normalize_series": False   # glifograma normalizado a fracción por paso
+})
+
+# -------------
+# Utilidades internas
+# -------------
+
+def _ensure_history(G):
+    if "history" not in G.graph:
+        G.graph["history"] = {}
+    return G.graph["history"]
+
+
+def _last_glifo(nd: Dict[str, Any]) -> str | None:
+    hist = nd.get("hist_glifos")
+    if not hist:
+        return None
+    try:
+        return list(hist)[-1]
+    except Exception:
+        return None
+
+
+# -------------
+# Estado nodal para Tg
+# -------------
+
+def _tg_state(nd: Dict[str, Any]) -> Dict[str, Any]:
+    """Estructura interna por nodo para acumular tiempos de corrida por glifo.
+    Campos: curr (glifo actual), run (tiempo acumulado en el glifo actual)
+    """
+    st = nd.setdefault("_Tg", {"curr": None, "run": 0.0})
+    st.setdefault("curr", None)
+    st.setdefault("run", 0.0)
+    return st
+
+
+# -------------
+# Callback principal: actualizar métricas por paso
+# -------------
+
+def _metrics_step(G, *args, **kwargs):
+    """Actualiza métricas operativas TNFR por paso.
+
+    - Tg (tiempo glífico): sumatoria de corridas por glifo (global y por nodo).
+    - Índice de latencia: fracción de nodos en SH’A.
+    - Glifograma: conteo o fracción por glifo en el paso.
+
+    Todos los resultados se guardan en G.graph['history'].
+    """
+    if not G.graph.get("METRICS", DEFAULTS.get("METRICS", {})).get("enabled", True):
+        return
+
+    hist = _ensure_history(G)
+    dt = float(G.graph.get("DT", 1.0))
+    t = float(G.graph.get("_t", 0.0))
+
+    # --- Glifograma (conteos por glifo este paso) ---
+    counts = Counter()
+
+    # --- Índice de latencia: proporción de nodos en SH’A ---
+    n_total = 0
+    n_latent = 0
+
+    # --- Tg: acumular corridas por nodo ---
+    save_by_node = bool(G.graph.get("METRICS", DEFAULTS["METRICS"]).get("save_by_node", True))
+    tg_total = hist.setdefault("Tg_total", defaultdict(float))  # tiempo total por glifo (global)
+    tg_by_node = hist.setdefault("Tg_by_node", {})              # nodo → {glifo: [runs,...]}
+
+    for n in G.nodes():
+        nd = G.nodes[n]
+        g = _last_glifo(nd)
+        if not g:
+            continue
+
+        n_total += 1
+        if g == "SH’A":
+            n_latent += 1
+
+        counts[g] += 1
+
+        st = _tg_state(nd)
+        # Si seguimos en el mismo glifo, acumulamos; si cambiamos, cerramos corrida
+        if st["curr"] is None:
+            st["curr"] = g
+            st["run"] = dt
+        elif g == st["curr"]:
+            st["run"] += dt
+        else:
+            # cerramos corrida anterior
+            prev = st["curr"]
+            dur = float(st["run"])
+            tg_total[prev] += dur
+            if save_by_node:
+                rec = tg_by_node.setdefault(n, defaultdict(list))
+                rec[prev].append(dur)
+            # reiniciamos corrida
+            st["curr"] = g
+            st["run"] = dt
+
+    # Al final del paso, no cerramos la corrida actual: se cerrará cuando cambie.
+
+    # Guardar glifograma (conteos crudos y normalizados)
+    norm = bool(G.graph.get("METRICS", DEFAULTS["METRICS"]).get("normalize_series", False))
+    row = {"t": t}
+    total = max(1, sum(counts.values()))
+    for g in GLYPHS_CANONICAL:
+        c = counts.get(g, 0)
+        row[g] = (c / total) if norm else c
+    hist.setdefault("glifogram", []).append(row)
+
+    # Guardar índice de latencia
+    li = (n_latent / max(1, n_total)) if n_total else 0.0
+    hist.setdefault("latency_index", []).append({"t": t, "value": li})
+
+
+# -------------
+# Registro del callback
+# -------------
+
+def register_metrics_callbacks(G) -> None:
+    register_callback(G, when="after_step", func=_metrics_step, name="metrics_step")
+
+
+# -------------
+# Consultas / reportes
+# -------------
+
+def Tg_global(G, normalize: bool = True) -> Dict[str, float]:
+    """Tiempo glífico total por clase. Si normalize=True, devuelve fracciones del total."""
+    hist = _ensure_history(G)
+    tg_total: Dict[str, float] = hist.get("Tg_total", {})
+    total = sum(tg_total.values()) or 1.0
+    if normalize:
+        return {g: float(tg_total.get(g, 0.0)) / total for g in GLYPHS_CANONICAL}
+    return {g: float(tg_total.get(g, 0.0)) for g in GLYPHS_CANONICAL}
+
+
+def Tg_by_node(G, n, normalize: bool = False) -> Dict[str, float | List[float]]:
+    """Resumen por nodo: si normalize, devuelve medias por glifo; si no, lista de corridas."""
+    hist = _ensure_history(G)
+    rec = hist.get("Tg_by_node", {}).get(n, {})
+    if not normalize:
+        # convertir defaultdict → list para serializar
+        return {g: list(rec.get(g, [])) for g in GLYPHS_CANONICAL}
+    out = {}
+    for g in GLYPHS_CANONICAL:
+        runs = rec.get(g, [])
+        out[g] = float(statistics.mean(runs)) if runs else 0.0
+
+    return out
+
+
+def latency_series(G) -> Dict[str, List[float]]:
+    hist = _ensure_history(G)
+    xs = hist.get("latency_index", [])
+    return {
+        "t": [float(x.get("t", i)) for i, x in enumerate(xs)],
+        "value": [float(x.get("value", 0.0)) for x in xs],
+    }
+
+
+def glifogram_series(G) -> Dict[str, List[float]]:
+    hist = _ensure_history(G)
+    xs = hist.get("glifogram", [])
+    if not xs:
+        return {"t": []}
+    out = {"t": [float(x.get("t", i)) for i, x in enumerate(xs)]}
+    for g in GLYPHS_CANONICAL:
+        out[g] = [float(x.get(g, 0.0)) for x in xs]
+    return out
+
+
+def glyph_top(G, k: int = 3) -> List[Tuple[str, float]]:
+    """Top-k glifos por Tg_global (fracción)."""
+    tg = Tg_global(G, normalize=True)
+    return sorted(tg.items(), key=lambda kv: kv[1], reverse=True)[:max(1, int(k))]
+
+
+def glyph_dwell_stats(G, n) -> Dict[str, Dict[str, float]]:
+    """Estadísticos por nodo: mean/median/max de corridas por glifo."""
+    hist = _ensure_history(G)
+    rec = hist.get("Tg_by_node", {}).get(n, {})
+    out = {}
+    for g in GLYPHS_CANONICAL:
+        runs = list(rec.get(g, []))
+        if not runs:
+            out[g] = {"mean": 0.0, "median": 0.0, "max": 0.0, "count": 0}
+        else:
+            out[g] = {
+                "mean": float(statistics.mean(runs)),
+                "median": float(statistics.median(runs)),
+                "max": float(max(runs)),
+                "count": int(len(runs)),
+            }
+    return out
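
A short sketch of how the new reporting helpers might be queried after a run (the graph setup is illustrative and assumes `preparar_red` leaves the graph ready for `run`):

```python
import networkx as nx
from tnfr import preparar_red, run
from tnfr.metrics import register_metrics_callbacks, Tg_global, glyph_top, latency_series

G = nx.erdos_renyi_graph(20, 0.2)
preparar_red(G)
register_metrics_callbacks(G)   # hooks _metrics_step into "after_step"
run(G, 50)

print(Tg_global(G))                    # fraction of total glyph time per glyph
print(glyph_top(G, k=3))               # top-3 glyphs by Tg share
print(latency_series(G)["value"][-1])  # latency index at the last step
```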
tnfr/operators.py CHANGED
@@ -154,6 +154,18 @@ _NAME_TO_OP = {


 def aplicar_glifo(G, n, glifo: str, *, window: Optional[int] = None) -> None:
+    """Aplica un glifo TNFR al nodo `n` con histéresis `window`.
+
+    Los 13 glifos implementan reorganizadores canónicos:
+    A’L (emisión), E’N (recepción), I’L (coherencia), O’Z (disonancia),
+    U’M (acoplamiento), R’A (resonancia), SH’A (silencio), VA’L (expansión),
+    NU’L (contracción), T’HOL (autoorganización), Z’HIR (mutación),
+    NA’V (transición), RE’MESH (recursividad).
+
+    Relación con la gramática: la selección previa debe pasar por
+    `enforce_canonical_grammar` (grammar.py) para respetar compatibilidades,
+    precondición O’Z→Z’HIR y cierres T’HOL[...].
+    """
     glifo = str(glifo)
     op = _NAME_TO_OP.get(glifo)
     if not op:
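
The grammar/operator split the docstring refers to is easiest to see as one call sequence. A sketch of the pattern (it mirrors `_apply_glyph_to_targets` in the new `program.py`; the demo graph comes from the new `scenarios.build_graph` and is assumed to carry enough node state):

```python
from tnfr.scenarios import build_graph
from tnfr.grammar import enforce_canonical_grammar, on_applied_glifo
from tnfr.operators import aplicar_glifo

G = build_graph(12, topology="ring", seed=1)
n = next(iter(G.nodes()))

g_eff = enforce_canonical_grammar(G, n, "Z’HIR")  # grammar may substitute the requested glyph
aplicar_glifo(G, n, g_eff, window=3)              # apply with a hysteresis window of 3
on_applied_glifo(G, n, g_eff)                     # let the grammar record the application
```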
tnfr/presets.py ADDED
@@ -0,0 +1,24 @@
+from __future__ import annotations
+from .program import seq, block, wait
+
+
+_PRESETS = {
+    "arranque_resonante": seq("A’L", "E’N", "I’L", "R’A", "VA’L", "U’M", wait(3), "SH’A"),
+    "mutacion_contenida": seq("A’L", "E’N", block("O’Z", "Z’HIR", "I’L", repeat=2), "R’A", "SH’A"),
+    "exploracion_acople": seq(
+        "A’L",
+        "E’N",
+        "I’L",
+        "VA’L",
+        "U’M",
+        block("O’Z", "NA’V", "I’L", repeat=1),
+        "R’A",
+        "SH’A",
+    ),
+}
+
+
+def get_preset(name: str):
+    if name not in _PRESETS:
+        raise KeyError(f"Preset no encontrado: {name}")
+    return _PRESETS[name]
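
A sketch of how a preset might be run end to end with `program.play` (demo graph via `scenarios.build_graph`; whether extra setup such as `attach_defaults` is needed depends on the rest of the package):

```python
from tnfr.presets import get_preset
from tnfr.program import play
from tnfr.scenarios import build_graph

G = build_graph(24, topology="ring", seed=1)
play(G, get_preset("arranque_resonante"))  # applies each glyph, advancing one step per token
```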
tnfr/program.py ADDED
@@ -0,0 +1,168 @@
+from __future__ import annotations
+"""program.py — API de secuencias canónicas con T’HOL como primera clase."""
+from typing import Any, Callable, Iterable, List, Optional, Sequence, Tuple, Union
+from dataclasses import dataclass
+from contextlib import contextmanager
+
+from .constants import DEFAULTS
+from .helpers import register_callback
+from .grammar import enforce_canonical_grammar, on_applied_glifo
+from .operators import aplicar_glifo
+from .sense import GLYPHS_CANONICAL
+
+# Tipos básicos
+Glyph = str
+Node = Any
+AdvanceFn = Callable[[Any], None]  # normalmente dynamics.step
+
+# ---------------------
+# Construcciones del DSL
+# ---------------------
+
+@dataclass
+class WAIT:
+    steps: int = 1
+
+@dataclass
+class TARGET:
+    nodes: Optional[Iterable[Node]] = None  # None = todos los nodos
+
+@dataclass
+class THOL:
+    body: Sequence[Any]
+    repeat: int = 1  # cuántas veces repetir el cuerpo
+    force_close: Optional[Glyph] = None  # None → cierre automático (gramática); 'SH’A' o 'NU’L' para forzar
+
+Token = Union[Glyph, WAIT, TARGET, THOL]
+
+# ---------------------
+# Utilidades internas
+# ---------------------
+
+@contextmanager
+def _forced_selector(G, glyph: Glyph):
+    """Sobrescribe temporalmente el selector glífico para forzar `glyph`.
+    Pasa por la gramática canónica antes de aplicar.
+    """
+    prev = G.graph.get("glyph_selector")
+    def selector_forced(_G, _n):
+        return glyph
+    G.graph["glyph_selector"] = selector_forced
+    try:
+        yield
+    finally:
+        if prev is None:
+            G.graph.pop("glyph_selector", None)
+        else:
+            G.graph["glyph_selector"] = prev
+
+def _window(G) -> int:
+    return int(G.graph.get("GLYPH_HYSTERESIS_WINDOW", DEFAULTS.get("GLYPH_HYSTERESIS_WINDOW", 1)))
+
+def _all_nodes(G):
+    return list(G.nodes())
+
+# ---------------------
+# Núcleo de ejecución
+# ---------------------
+
+def _apply_glyph_to_targets(G, g: Glyph, nodes: Optional[Iterable[Node]] = None):
+    nodes = list(nodes) if nodes is not None else _all_nodes(G)
+    w = _window(G)
+    # Pasamos por la gramática antes de aplicar
+    for n in nodes:
+        g_eff = enforce_canonical_grammar(G, n, g)
+        aplicar_glifo(G, n, g_eff, window=w)
+        on_applied_glifo(G, n, g_eff)
+
+def _advance(G, step_fn: Optional[AdvanceFn] = None):
+    if step_fn is None:
+        from .dynamics import step as step_fn
+    step_fn(G)
+
+# ---------------------
+# Compilación de secuencia → lista de operaciones atómicas
+# ---------------------
+
+def _flatten(seq: Sequence[Token], current_target: Optional[TARGET] = None) -> List[Tuple[str, Any]]:
+    """Devuelve lista de operaciones (op, payload).
+    op ∈ { 'GLYPH', 'WAIT', 'TARGET' }.
+    """
+    ops: List[Tuple[str, Any]] = []
+    for item in seq:
+        if isinstance(item, TARGET):
+            ops.append(("TARGET", item))
+        elif isinstance(item, WAIT):
+            ops.append(("WAIT", item.steps))
+        elif isinstance(item, THOL):
+            # abrir bloque T’HOL
+            ops.append(("GLYPH", "T’HOL"))
+            for _ in range(max(1, int(item.repeat))):
+                ops.extend(_flatten(item.body, current_target))
+            # cierre explícito si se pidió; si no, la gramática puede cerrarlo
+            if item.force_close in ("SH’A", "NU’L"):
+                ops.append(("GLYPH", item.force_close))
+        else:
+            # item debería ser un glifo
+            g = str(item)
+            if g not in GLYPHS_CANONICAL:
+                # Permitimos glifos no listados (compat futuros), pero no forzamos
+                pass
+            ops.append(("GLYPH", g))
+    return ops
+
+# ---------------------
+# API pública
+# ---------------------
+
+def play(G, sequence: Sequence[Token], step_fn: Optional[AdvanceFn] = None) -> None:
+    """Ejecuta una secuencia canónica sobre el grafo `G`.
+
+    Reglas:
+    - Usa `TARGET(nodes=...)` para cambiar el subconjunto de aplicación.
+    - `WAIT(k)` avanza k pasos con el selector vigente (no fuerza glifo).
+    - `THOL([...], repeat=r, force_close=…)` abre un bloque autoorganizativo,
+      repite el cuerpo y (opcional) fuerza cierre con SH’A/NU’L.
+    - Los glifos se aplican pasando por `enforce_canonical_grammar`.
+    """
+    ops = _flatten(sequence)
+    curr_target: Optional[Iterable[Node]] = None
+
+    # Traza de programa en history
+    if "history" not in G.graph:
+        G.graph["history"] = {}
+    trace = G.graph["history"].setdefault("program_trace", [])
+
+    for op, payload in ops:
+        if op == "TARGET":
+            curr_target = list(payload.nodes) if payload.nodes is not None else None
+            trace.append({"t": float(G.graph.get("_t", 0.0)), "op": "TARGET", "n": len(curr_target or _all_nodes(G))})
+            continue
+        if op == "WAIT":
+            for _ in range(max(1, int(payload))):
+                _advance(G, step_fn)
+            trace.append({"t": float(G.graph.get("_t", 0.0)), "op": "WAIT", "k": int(payload)})
+            continue
+        if op == "GLYPH":
+            g = str(payload)
+            # aplicar + avanzar 1 paso del sistema
+            _apply_glyph_to_targets(G, g, curr_target)
+            _advance(G, step_fn)
+            trace.append({"t": float(G.graph.get("_t", 0.0)), "op": "GLYPH", "g": g})
+            continue

+# ---------------------
+# Helpers para construir secuencias de manera cómoda
+# ---------------------
+
+def seq(*tokens: Token) -> List[Token]:
+    return list(tokens)
+
+def block(*tokens: Token, repeat: int = 1, close: Optional[Glyph] = None) -> THOL:
+    return THOL(body=list(tokens), repeat=repeat, force_close=close)
+
+def target(nodes: Optional[Iterable[Node]] = None) -> TARGET:
+    return TARGET(nodes=nodes)
+
+def wait(steps: int = 1) -> WAIT:
+    return WAIT(steps=max(1, int(steps)))
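
A sketch of the DSL in use, combining `target`, `block` (a T’HOL block) and `wait` (the demo graph and node ids are illustrative):

```python
from tnfr.program import seq, block, target, wait, play
from tnfr.scenarios import build_graph

G = build_graph(12, topology="complete", seed=7)

programa = seq(
    target([0, 1, 2]),                       # restrict application to these nodes
    "A’L", "E’N", "I’L",
    block("O’Z", "Z’HIR", "I’L", repeat=2),  # T’HOL block, body repeated twice
    wait(3),                                 # three steps with the active selector
    "SH’A",
)
play(G, programa)
```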
tnfr/scenarios.py ADDED
@@ -0,0 +1,28 @@
+from __future__ import annotations
+from typing import Any
+import random
+import networkx as nx
+
+from .constants import inject_defaults, DEFAULTS
+
+
+def build_graph(n: int = 24, topology: str = "ring", seed: int | None = 1):
+    rng = random.Random(seed)
+    if topology == "ring":
+        G = nx.cycle_graph(n)
+    elif topology == "complete":
+        G = nx.complete_graph(n)
+    elif topology == "erdos":
+        G = nx.gnp_random_graph(n, 3.0 / n, seed=seed)
+    else:
+        G = nx.path_graph(n)
+
+    for i in G.nodes():
+        nd = G.nodes[i]
+        nd.setdefault("EPI", rng.uniform(0.1, 0.3))
+        nd.setdefault("νf", rng.uniform(0.8, 1.2))
+        nd.setdefault("θ", rng.uniform(-3.1416, 3.1416))
+        nd.setdefault("Si", rng.uniform(0.4, 0.7))
+
+    inject_defaults(G, DEFAULTS)
+    return G
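
For quick reference, the topology branches above map to networkx builders as follows (any unrecognised `topology` value falls back to a path graph):

```python
from tnfr.scenarios import build_graph

ring  = build_graph(24, topology="ring")           # nx.cycle_graph
full  = build_graph(10, topology="complete")       # nx.complete_graph
er    = build_graph(50, topology="erdos", seed=3)  # nx.gnp_random_graph(n, 3/n)
chain = build_graph(24, topology="other")          # fallback: nx.path_graph
```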