tnfr 4.5.1-py3-none-any.whl → 4.5.2-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of tnfr has been flagged as possibly problematic.

Files changed (78)
  1. tnfr/__init__.py +91 -90
  2. tnfr/alias.py +546 -0
  3. tnfr/cache.py +578 -0
  4. tnfr/callback_utils.py +388 -0
  5. tnfr/cli/__init__.py +75 -0
  6. tnfr/cli/arguments.py +177 -0
  7. tnfr/cli/execution.py +288 -0
  8. tnfr/cli/utils.py +36 -0
  9. tnfr/collections_utils.py +300 -0
  10. tnfr/config.py +19 -28
  11. tnfr/constants/__init__.py +174 -0
  12. tnfr/constants/core.py +159 -0
  13. tnfr/constants/init.py +31 -0
  14. tnfr/constants/metric.py +110 -0
  15. tnfr/constants_glyphs.py +98 -0
  16. tnfr/dynamics/__init__.py +658 -0
  17. tnfr/dynamics/dnfr.py +733 -0
  18. tnfr/dynamics/integrators.py +267 -0
  19. tnfr/dynamics/sampling.py +31 -0
  20. tnfr/execution.py +201 -0
  21. tnfr/flatten.py +283 -0
  22. tnfr/gamma.py +302 -88
  23. tnfr/glyph_history.py +290 -0
  24. tnfr/grammar.py +285 -96
  25. tnfr/graph_utils.py +84 -0
  26. tnfr/helpers/__init__.py +71 -0
  27. tnfr/helpers/numeric.py +87 -0
  28. tnfr/immutable.py +178 -0
  29. tnfr/import_utils.py +228 -0
  30. tnfr/initialization.py +197 -0
  31. tnfr/io.py +246 -0
  32. tnfr/json_utils.py +162 -0
  33. tnfr/locking.py +37 -0
  34. tnfr/logging_utils.py +116 -0
  35. tnfr/metrics/__init__.py +41 -0
  36. tnfr/metrics/coherence.py +829 -0
  37. tnfr/metrics/common.py +151 -0
  38. tnfr/metrics/core.py +101 -0
  39. tnfr/metrics/diagnosis.py +234 -0
  40. tnfr/metrics/export.py +137 -0
  41. tnfr/metrics/glyph_timing.py +189 -0
  42. tnfr/metrics/reporting.py +148 -0
  43. tnfr/metrics/sense_index.py +120 -0
  44. tnfr/metrics/trig.py +181 -0
  45. tnfr/metrics/trig_cache.py +109 -0
  46. tnfr/node.py +214 -159
  47. tnfr/observers.py +126 -136
  48. tnfr/ontosim.py +134 -134
  49. tnfr/operators/__init__.py +420 -0
  50. tnfr/operators/jitter.py +203 -0
  51. tnfr/operators/remesh.py +485 -0
  52. tnfr/presets.py +46 -14
  53. tnfr/rng.py +254 -0
  54. tnfr/selector.py +210 -0
  55. tnfr/sense.py +284 -131
  56. tnfr/structural.py +207 -79
  57. tnfr/tokens.py +60 -0
  58. tnfr/trace.py +329 -94
  59. tnfr/types.py +43 -17
  60. tnfr/validators.py +70 -24
  61. tnfr/value_utils.py +59 -0
  62. tnfr-4.5.2.dist-info/METADATA +379 -0
  63. tnfr-4.5.2.dist-info/RECORD +67 -0
  64. tnfr/cli.py +0 -322
  65. tnfr/constants.py +0 -277
  66. tnfr/dynamics.py +0 -814
  67. tnfr/helpers.py +0 -264
  68. tnfr/main.py +0 -47
  69. tnfr/metrics.py +0 -597
  70. tnfr/operators.py +0 -525
  71. tnfr/program.py +0 -176
  72. tnfr/scenarios.py +0 -34
  73. tnfr-4.5.1.dist-info/METADATA +0 -221
  74. tnfr-4.5.1.dist-info/RECORD +0 -28
  75. {tnfr-4.5.1.dist-info → tnfr-4.5.2.dist-info}/WHEEL +0 -0
  76. {tnfr-4.5.1.dist-info → tnfr-4.5.2.dist-info}/entry_points.txt +0 -0
  77. {tnfr-4.5.1.dist-info → tnfr-4.5.2.dist-info}/licenses/LICENSE.md +0 -0
  78. {tnfr-4.5.1.dist-info → tnfr-4.5.2.dist-info}/top_level.txt +0 -0
tnfr/operators/remesh.py ADDED
@@ -0,0 +1,485 @@
+ from __future__ import annotations
+ import hashlib
+ import heapq
+ from operator import ge, le
+ from functools import cache
+ from itertools import combinations
+ from io import StringIO
+ from collections import deque
+ from statistics import fmean, StatisticsError
+
+ from ..cache import edge_version_update
+ from ..constants import DEFAULTS, REMESH_DEFAULTS, get_aliases, get_param
+ from ..helpers.numeric import kahan_sum_nd
+ from ..alias import get_attr, set_attr
+ from ..rng import make_rng
+ from ..callback_utils import CallbackEvent, callback_manager
+ from ..glyph_history import append_metric, ensure_history, current_step_idx
+ from ..import_utils import cached_import
+
+ ALIAS_EPI = get_aliases("EPI")
+
+
+ @cache
+ def _get_networkx_modules():
+     nx = cached_import("networkx")
+     if nx is None:
+         raise ImportError(
+             "networkx is required for network operators; install 'networkx' "
+             "to enable this feature"
+         )
+     nx_comm = cached_import("networkx.algorithms", "community")
+     if nx_comm is None:
+         raise ImportError(
+             "networkx.algorithms.community is required for community-based "
+             "operations; install 'networkx' to enable this feature"
+         )
+     return nx, nx_comm
+
+
+ def _remesh_alpha_info(G):
+     """Return ``(alpha, source)`` with explicit precedence."""
+     if bool(
+         G.graph.get("REMESH_ALPHA_HARD", REMESH_DEFAULTS["REMESH_ALPHA_HARD"])
+     ):
+         val = float(
+             G.graph.get("REMESH_ALPHA", REMESH_DEFAULTS["REMESH_ALPHA"])
+         )
+         return val, "REMESH_ALPHA"
+     gf = G.graph.get("GLYPH_FACTORS", DEFAULTS.get("GLYPH_FACTORS", {}))
+     if "REMESH_alpha" in gf:
+         return float(gf["REMESH_alpha"]), "GLYPH_FACTORS.REMESH_alpha"
+     if "REMESH_ALPHA" in G.graph:
+         return float(G.graph["REMESH_ALPHA"]), "REMESH_ALPHA"
+     return (
+         float(REMESH_DEFAULTS["REMESH_ALPHA"]),
+         "REMESH_DEFAULTS.REMESH_ALPHA",
+     )
+
+
+ def _snapshot_topology(G, nx):
+     """Return a hash representing the current graph topology."""
+     try:
+         n_nodes = G.number_of_nodes()
+         n_edges = G.number_of_edges()
+         degs = sorted(d for _, d in G.degree())
+         topo_str = f"n={n_nodes};m={n_edges};deg=" + ",".join(map(str, degs))
+         return hashlib.sha1(topo_str.encode()).hexdigest()[:12]
+     except (AttributeError, TypeError, nx.NetworkXError):
+         return None
+
+
+ def _snapshot_epi(G):
+     """Return ``(mean, checksum)`` of the node EPI values."""
+     buf = StringIO()
+     values = []
+     for n, data in G.nodes(data=True):
+         v = float(get_attr(data, ALIAS_EPI, 0.0))
+         values.append(v)
+         buf.write(f"{str(n)}:{round(v, 6)};")
+     total = kahan_sum_nd(((v,) for v in values), dims=1)[0]
+     mean_val = total / len(values) if values else 0.0
+     checksum = hashlib.sha1(buf.getvalue().encode()).hexdigest()[:12]
+     return float(mean_val), checksum
+
+
+ def _log_remesh_event(G, meta):
+     """Store remesh metadata and optionally log and trigger callbacks."""
+     G.graph["_REMESH_META"] = meta
+     if G.graph.get("REMESH_LOG_EVENTS", REMESH_DEFAULTS["REMESH_LOG_EVENTS"]):
+         hist = G.graph.setdefault("history", {})
+         append_metric(hist, "remesh_events", dict(meta))
+     callback_manager.invoke_callbacks(
+         G, CallbackEvent.ON_REMESH.value, dict(meta)
+     )
+
+
+ def apply_network_remesh(G) -> None:
+     """Network-scale REMESH using ``_epi_hist`` with multi-scale memory."""
+     nx, _ = _get_networkx_modules()
+     tau_g = int(get_param(G, "REMESH_TAU_GLOBAL"))
+     tau_l = int(get_param(G, "REMESH_TAU_LOCAL"))
+     tau_req = max(tau_g, tau_l)
+     alpha, alpha_src = _remesh_alpha_info(G)
+     G.graph["_REMESH_ALPHA_SRC"] = alpha_src
+     hist = G.graph.get("_epi_hist", deque())
+     if len(hist) < tau_req + 1:
+         return
+
+     past_g = hist[-(tau_g + 1)]
+     past_l = hist[-(tau_l + 1)]
+
+     topo_hash = _snapshot_topology(G, nx)
+     epi_mean_before, epi_checksum_before = _snapshot_epi(G)
+
+     for n, nd in G.nodes(data=True):
+         epi_now = get_attr(nd, ALIAS_EPI, 0.0)
+         epi_old_l = float(past_l.get(n, epi_now))
+         epi_old_g = float(past_g.get(n, epi_now))
+         mixed = (1 - alpha) * epi_now + alpha * epi_old_l
+         mixed = (1 - alpha) * mixed + alpha * epi_old_g
+         set_attr(nd, ALIAS_EPI, mixed)
+
+     epi_mean_after, epi_checksum_after = _snapshot_epi(G)
+
+     step_idx = current_step_idx(G)
+     meta = {
+         "alpha": alpha,
+         "alpha_source": alpha_src,
+         "tau_global": tau_g,
+         "tau_local": tau_l,
+         "step": step_idx,
+         "topo_hash": topo_hash,
+         "epi_mean_before": float(epi_mean_before),
+         "epi_mean_after": float(epi_mean_after),
+         "epi_checksum_before": epi_checksum_before,
+         "epi_checksum_after": epi_checksum_after,
+     }
+
+     h = ensure_history(G)
+     if h:
+         if h.get("stable_frac"):
+             meta["stable_frac_last"] = h["stable_frac"][-1]
+         if h.get("phase_sync"):
+             meta["phase_sync_last"] = h["phase_sync"][-1]
+         if h.get("glyph_load_disr"):
+             meta["glyph_disr_last"] = h["glyph_load_disr"][-1]
+
+     _log_remesh_event(G, meta)
+
+
+ def _mst_edges_from_epi(nx, nodes, epi):
+     """Return MST edges based on absolute EPI distance."""
+     H = nx.Graph()
+     H.add_nodes_from(nodes)
+     H.add_weighted_edges_from(
+         (u, v, abs(epi[u] - epi[v])) for u, v in combinations(nodes, 2)
+     )
+     return {
+         tuple(sorted((u, v)))
+         for u, v in nx.minimum_spanning_edges(H, data=False)
+     }
+
+
+ def _knn_edges(nodes, epi, k_val, p_rewire, rnd):
+     """Edges linking each node to its ``k`` nearest neighbours in EPI."""
+     new_edges = set()
+     node_set = set(nodes)
+     for u in nodes:
+         epi_u = epi[u]
+         neighbours = [
+             v
+             for _, v in heapq.nsmallest(
+                 k_val,
+                 ((abs(epi_u - epi[v]), v) for v in nodes if v != u),
+             )
+         ]
+         for v in neighbours:
+             if rnd.random() < p_rewire:
+                 choices = list(node_set - {u, v})
+                 if choices:
+                     v = rnd.choice(choices)
+             new_edges.add(tuple(sorted((u, v))))
+     return new_edges
+
+
+ def _community_graph(comms, epi, nx):
+     """Return community graph ``C`` with mean EPI per community."""
+     C = nx.Graph()
+     for idx, comm in enumerate(comms):
+         members = list(comm)
+         try:
+             epi_mean = fmean(epi[n] for n in members)
+         except StatisticsError:
+             epi_mean = 0.0
+         C.add_node(idx)
+         set_attr(C.nodes[idx], ALIAS_EPI, epi_mean)
+         C.nodes[idx]["members"] = members
+     for i, j in combinations(C.nodes(), 2):
+         w = abs(
+             get_attr(C.nodes[i], ALIAS_EPI, 0.0)
+             - get_attr(C.nodes[j], ALIAS_EPI, 0.0)
+         )
+         C.add_edge(i, j, weight=w)
+     return C
+
+
+ def _community_k_neighbor_edges(C, k_val, p_rewire, rnd):
+     """Edges linking each community to its ``k`` nearest neighbours."""
+     epi_vals = {n: get_attr(C.nodes[n], ALIAS_EPI, 0.0) for n in C.nodes()}
+     ordered = sorted(C.nodes(), key=lambda v: epi_vals[v])
+     new_edges = set()
+     attempts = {n: 0 for n in C.nodes()}
+     rewired = []
+     node_set = set(C.nodes())
+     for idx, u in enumerate(ordered):
+         epi_u = epi_vals[u]
+         left = idx - 1
+         right = idx + 1
+         added = 0
+         while added < k_val and (left >= 0 or right < len(ordered)):
+             if left < 0:
+                 v = ordered[right]
+                 right += 1
+             elif right >= len(ordered):
+                 v = ordered[left]
+                 left -= 1
+             else:
+                 if abs(epi_u - epi_vals[ordered[left]]) <= abs(
+                     epi_vals[ordered[right]] - epi_u
+                 ):
+                     v = ordered[left]
+                     left -= 1
+                 else:
+                     v = ordered[right]
+                     right += 1
+             original_v = v
+             rewired_now = False
+             if rnd.random() < p_rewire:
+                 choices = list(node_set - {u, original_v})
+                 if choices:
+                     v = rnd.choice(choices)
+                     rewired_now = True
+             new_edges.add(tuple(sorted((u, v))))
+             attempts[u] += 1
+             if rewired_now:
+                 rewired.append((u, original_v, v))
+             added += 1
+     return new_edges, attempts, rewired
+
+
+ def _community_remesh(
+     G,
+     epi,
+     k_val,
+     p_rewire,
+     rnd,
+     nx,
+     nx_comm,
+     mst_edges,
+     n_before,
+ ):
+     """Remesh ``G`` replacing nodes by modular communities."""
+     comms = list(nx_comm.greedy_modularity_communities(G))
+     if len(comms) <= 1:
+         with edge_version_update(G):
+             G.clear_edges()
+             G.add_edges_from(mst_edges)
+         return
+     C = _community_graph(comms, epi, nx)
+     mst_c = nx.minimum_spanning_tree(C, weight="weight")
+     new_edges = set(mst_c.edges())
+     extra_edges, attempts, rewired_edges = _community_k_neighbor_edges(
+         C, k_val, p_rewire, rnd
+     )
+     new_edges |= extra_edges
+
+     extra_degrees = {idx: 0 for idx in C.nodes()}
+     for u, v in extra_edges:
+         extra_degrees[u] += 1
+         extra_degrees[v] += 1
+
+     with edge_version_update(G):
+         G.clear_edges()
+         G.remove_nodes_from(list(G.nodes()))
+         for idx in C.nodes():
+             data = dict(C.nodes[idx])
+             G.add_node(idx, **data)
+         G.add_edges_from(new_edges)
+
+     if G.graph.get("REMESH_LOG_EVENTS", REMESH_DEFAULTS["REMESH_LOG_EVENTS"]):
+         hist = G.graph.setdefault("history", {})
+         mapping = {idx: C.nodes[idx].get("members", []) for idx in C.nodes()}
+         append_metric(
+             hist,
+             "remesh_events",
+             {
+                 "mode": "community",
+                 "n_before": n_before,
+                 "n_after": G.number_of_nodes(),
+                 "mapping": mapping,
+                 "k": int(k_val),
+                 "p_rewire": float(p_rewire),
+                 "extra_edges_added": len(extra_edges),
+                 "extra_edge_attempts": attempts,
+                 "extra_edge_degrees": extra_degrees,
+                 "rewired_edges": [
+                     {"source": int(u), "from": int(v0), "to": int(v1)}
+                     for u, v0, v1 in rewired_edges
+                 ],
+             },
+         )
+
+
+ def apply_topological_remesh(
+     G,
+     mode: str | None = None,
+     *,
+     k: int | None = None,
+     p_rewire: float = 0.2,
+     seed: int | None = None,
+ ) -> None:
+     """Approximate topological remeshing.
+
+     When ``seed`` is ``None`` the RNG draws its base seed from
+     ``G.graph['RANDOM_SEED']`` to keep runs reproducible.
+     """
+     nodes = list(G.nodes())
+     n_before = len(nodes)
+     if n_before <= 1:
+         return
+     if seed is None:
+         base_seed = int(G.graph.get("RANDOM_SEED", 0))
+     else:
+         base_seed = int(seed)
+     rnd = make_rng(base_seed, -2, G)
+
+     if mode is None:
+         mode = str(
+             G.graph.get(
+                 "REMESH_MODE", REMESH_DEFAULTS.get("REMESH_MODE", "knn")
+             )
+         )
+     mode = str(mode)
+     nx, nx_comm = _get_networkx_modules()
+     epi = {n: get_attr(G.nodes[n], ALIAS_EPI, 0.0) for n in nodes}
+     mst_edges = _mst_edges_from_epi(nx, nodes, epi)
+     default_k = int(
+         G.graph.get(
+             "REMESH_COMMUNITY_K", REMESH_DEFAULTS.get("REMESH_COMMUNITY_K", 2)
+         )
+     )
+     k_val = max(1, int(k) if k is not None else default_k)
+
+     if mode == "community":
+         _community_remesh(
+             G,
+             epi,
+             k_val,
+             p_rewire,
+             rnd,
+             nx,
+             nx_comm,
+             mst_edges,
+             n_before,
+         )
+         return
+
+     new_edges = set(mst_edges)
+     if mode == "knn":
+         new_edges |= _knn_edges(nodes, epi, k_val, p_rewire, rnd)
+
+     with edge_version_update(G):
+         G.clear_edges()
+         G.add_edges_from(new_edges)
+
+
+ def _extra_gating_ok(hist, cfg, w_estab):
+     """Check additional stability gating conditions."""
+     checks = [
+         ("phase_sync", "REMESH_MIN_PHASE_SYNC", ge),
+         ("glyph_load_disr", "REMESH_MAX_GLYPH_DISR", le),
+         ("sense_sigma_mag", "REMESH_MIN_SIGMA_MAG", ge),
+         ("kuramoto_R", "REMESH_MIN_KURAMOTO_R", ge),
+         ("Si_hi_frac", "REMESH_MIN_SI_HI_FRAC", ge),
+     ]
+     for hist_key, cfg_key, op in checks:
+         series = hist.get(hist_key)
+         if series is not None and len(series) >= w_estab:
+             win = series[-w_estab:]
+             avg = sum(win) / len(win)
+             if not op(avg, cfg[cfg_key]):
+                 return False
+     return True
+
+
+ def apply_remesh_if_globally_stable(
+     G, pasos_estables_consecutivos: int | None = None
+ ) -> None:
+     params = [
+         (
+             "REMESH_STABILITY_WINDOW",
+             int,
+             REMESH_DEFAULTS["REMESH_STABILITY_WINDOW"],
+         ),
+         (
+             "REMESH_REQUIRE_STABILITY",
+             bool,
+             REMESH_DEFAULTS["REMESH_REQUIRE_STABILITY"],
+         ),
+         (
+             "REMESH_MIN_PHASE_SYNC",
+             float,
+             REMESH_DEFAULTS["REMESH_MIN_PHASE_SYNC"],
+         ),
+         (
+             "REMESH_MAX_GLYPH_DISR",
+             float,
+             REMESH_DEFAULTS["REMESH_MAX_GLYPH_DISR"],
+         ),
+         (
+             "REMESH_MIN_SIGMA_MAG",
+             float,
+             REMESH_DEFAULTS["REMESH_MIN_SIGMA_MAG"],
+         ),
+         (
+             "REMESH_MIN_KURAMOTO_R",
+             float,
+             REMESH_DEFAULTS["REMESH_MIN_KURAMOTO_R"],
+         ),
+         (
+             "REMESH_MIN_SI_HI_FRAC",
+             float,
+             REMESH_DEFAULTS["REMESH_MIN_SI_HI_FRAC"],
+         ),
+         (
+             "REMESH_COOLDOWN_VENTANA",
+             int,
+             REMESH_DEFAULTS["REMESH_COOLDOWN_VENTANA"],
+         ),
+         ("REMESH_COOLDOWN_TS", float, REMESH_DEFAULTS["REMESH_COOLDOWN_TS"]),
+     ]
+     cfg = {}
+     for key, conv, _default in params:
+         cfg[key] = conv(get_param(G, key))
+     frac_req = float(get_param(G, "FRACTION_STABLE_REMESH"))
+     w_estab = (
+         pasos_estables_consecutivos
+         if pasos_estables_consecutivos is not None
+         else cfg["REMESH_STABILITY_WINDOW"]
+     )
+
+     hist = ensure_history(G)
+     sf = hist.setdefault("stable_frac", [])
+     if len(sf) < w_estab:
+         return
+     win_sf = sf[-w_estab:]
+     if not all(v >= frac_req for v in win_sf):
+         return
+     if cfg["REMESH_REQUIRE_STABILITY"] and not _extra_gating_ok(
+         hist, cfg, w_estab
+     ):
+         return
+
+     last = G.graph.get("_last_remesh_step", -(10**9))
+     step_idx = len(sf)
+     if step_idx - last < cfg["REMESH_COOLDOWN_VENTANA"]:
+         return
+     t_now = float(G.graph.get("_t", 0.0))
+     last_ts = float(G.graph.get("_last_remesh_ts", -1e12))
+     if (
+         cfg["REMESH_COOLDOWN_TS"] > 0
+         and (t_now - last_ts) < cfg["REMESH_COOLDOWN_TS"]
+     ):
+         return
+
+     apply_network_remesh(G)
+     G.graph["_last_remesh_step"] = step_idx
+     G.graph["_last_remesh_ts"] = t_now
+
+
+ __all__ = [
+     "apply_network_remesh",
+     "apply_topological_remesh",
+     "apply_remesh_if_globally_stable",
+ ]
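
The new tnfr/operators/remesh.py carries the remesh logic split out of the old monolithic tnfr/operators.py. As a quick orientation, the sketch below drives the module's public entry point on a toy graph. It is not taken from the diff: it assumes tnfr 4.5.2 and networkx are installed, uses an arbitrary path graph and parameter values, and relies on the module's documented fallback of EPI = 0.0 for nodes without an EPI attribute.

# Illustrative sketch only (not part of the release): exercise the public
# apply_topological_remesh() entry point shown in the hunk above.
import networkx as nx

from tnfr.operators.remesh import apply_topological_remesh

G = nx.path_graph(6)
G.graph["RANDOM_SEED"] = 7  # read when seed is not passed explicitly

# "knn" mode keeps an EPI-distance MST and adds k-nearest-neighbour edges,
# rewiring each candidate edge with probability p_rewire.
apply_topological_remesh(G, mode="knn", k=2, p_rewire=0.1, seed=7)
print(sorted(G.edges()))

For the scheduled path, apply_remesh_if_globally_stable(G) wraps apply_network_remesh(G) behind the stability-window, gating, and cooldown checks visible at the end of the hunk.
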
tnfr/presets.py CHANGED
@@ -1,24 +1,56 @@
+ """Predefined configurations."""
+
  from __future__ import annotations
- from .program import seq, block, wait, ejemplo_canonico_basico
+ from .execution import (
+     CANONICAL_PRESET_NAME,
+     CANONICAL_PROGRAM_TOKENS,
+     block,
+     seq,
+     wait,
+ )
+ from .types import Glyph
+
+ __all__ = ("get_preset",)


  _PRESETS = {
-     "arranque_resonante": seq("A’L", "E’N", "I’L", "R’A", "VA’L", "U’M", wait(3), "SH’A"),
-     "mutacion_contenida": seq("A’L", "E’N", block("O’Z", "Z’HIR", "I’L", repeat=2), "R’A", "SH’A"),
+     "arranque_resonante": seq(
+         Glyph.AL,
+         Glyph.EN,
+         Glyph.IL,
+         Glyph.RA,
+         Glyph.VAL,
+         Glyph.UM,
+         wait(3),
+         Glyph.SHA,
+     ),
+     "mutacion_contenida": seq(
+         Glyph.AL,
+         Glyph.EN,
+         block(Glyph.OZ, Glyph.ZHIR, Glyph.IL, repeat=2),
+         Glyph.RA,
+         Glyph.SHA,
+     ),
      "exploracion_acople": seq(
-         "A’L",
-         "E’N",
-         "I’L",
-         "VA’L",
-         "U’M",
-         block("O’Z", "NA’V", "I’L", repeat=1),
-         "R’A",
-         "SH’A",
+         Glyph.AL,
+         Glyph.EN,
+         Glyph.IL,
+         Glyph.VAL,
+         Glyph.UM,
+         block(Glyph.OZ, Glyph.NAV, Glyph.IL, repeat=1),
+         Glyph.RA,
+         Glyph.SHA,
      ),
-     "ejemplo_canonico": ejemplo_canonico_basico(),
+     CANONICAL_PRESET_NAME: list(CANONICAL_PROGRAM_TOKENS),
      # Topologías fractales: expansión/contracción modular
-     "fractal_expand": seq(block("T’HOL", "VA’L", "U’M", repeat=2, close="NU’L"), "R’A"),
-     "fractal_contract": seq(block("T’HOL", "NU’L", "U’M", repeat=2, close="SH’A"), "R’A"),
+     "fractal_expand": seq(
+         block(Glyph.THOL, Glyph.VAL, Glyph.UM, repeat=2, close=Glyph.NUL),
+         Glyph.RA,
+     ),
+     "fractal_contract": seq(
+         block(Glyph.THOL, Glyph.NUL, Glyph.UM, repeat=2, close=Glyph.SHA),
+         Glyph.RA,
+     ),
  }
