tnfr-4.5.1-py3-none-any.whl → tnfr-4.5.2-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of tnfr might be problematic.

Files changed (78)
  1. tnfr/__init__.py +91 -90
  2. tnfr/alias.py +546 -0
  3. tnfr/cache.py +578 -0
  4. tnfr/callback_utils.py +388 -0
  5. tnfr/cli/__init__.py +75 -0
  6. tnfr/cli/arguments.py +177 -0
  7. tnfr/cli/execution.py +288 -0
  8. tnfr/cli/utils.py +36 -0
  9. tnfr/collections_utils.py +300 -0
  10. tnfr/config.py +19 -28
  11. tnfr/constants/__init__.py +174 -0
  12. tnfr/constants/core.py +159 -0
  13. tnfr/constants/init.py +31 -0
  14. tnfr/constants/metric.py +110 -0
  15. tnfr/constants_glyphs.py +98 -0
  16. tnfr/dynamics/__init__.py +658 -0
  17. tnfr/dynamics/dnfr.py +733 -0
  18. tnfr/dynamics/integrators.py +267 -0
  19. tnfr/dynamics/sampling.py +31 -0
  20. tnfr/execution.py +201 -0
  21. tnfr/flatten.py +283 -0
  22. tnfr/gamma.py +302 -88
  23. tnfr/glyph_history.py +290 -0
  24. tnfr/grammar.py +285 -96
  25. tnfr/graph_utils.py +84 -0
  26. tnfr/helpers/__init__.py +71 -0
  27. tnfr/helpers/numeric.py +87 -0
  28. tnfr/immutable.py +178 -0
  29. tnfr/import_utils.py +228 -0
  30. tnfr/initialization.py +197 -0
  31. tnfr/io.py +246 -0
  32. tnfr/json_utils.py +162 -0
  33. tnfr/locking.py +37 -0
  34. tnfr/logging_utils.py +116 -0
  35. tnfr/metrics/__init__.py +41 -0
  36. tnfr/metrics/coherence.py +829 -0
  37. tnfr/metrics/common.py +151 -0
  38. tnfr/metrics/core.py +101 -0
  39. tnfr/metrics/diagnosis.py +234 -0
  40. tnfr/metrics/export.py +137 -0
  41. tnfr/metrics/glyph_timing.py +189 -0
  42. tnfr/metrics/reporting.py +148 -0
  43. tnfr/metrics/sense_index.py +120 -0
  44. tnfr/metrics/trig.py +181 -0
  45. tnfr/metrics/trig_cache.py +109 -0
  46. tnfr/node.py +214 -159
  47. tnfr/observers.py +126 -136
  48. tnfr/ontosim.py +134 -134
  49. tnfr/operators/__init__.py +420 -0
  50. tnfr/operators/jitter.py +203 -0
  51. tnfr/operators/remesh.py +485 -0
  52. tnfr/presets.py +46 -14
  53. tnfr/rng.py +254 -0
  54. tnfr/selector.py +210 -0
  55. tnfr/sense.py +284 -131
  56. tnfr/structural.py +207 -79
  57. tnfr/tokens.py +60 -0
  58. tnfr/trace.py +329 -94
  59. tnfr/types.py +43 -17
  60. tnfr/validators.py +70 -24
  61. tnfr/value_utils.py +59 -0
  62. tnfr-4.5.2.dist-info/METADATA +379 -0
  63. tnfr-4.5.2.dist-info/RECORD +67 -0
  64. tnfr/cli.py +0 -322
  65. tnfr/constants.py +0 -277
  66. tnfr/dynamics.py +0 -814
  67. tnfr/helpers.py +0 -264
  68. tnfr/main.py +0 -47
  69. tnfr/metrics.py +0 -597
  70. tnfr/operators.py +0 -525
  71. tnfr/program.py +0 -176
  72. tnfr/scenarios.py +0 -34
  73. tnfr-4.5.1.dist-info/METADATA +0 -221
  74. tnfr-4.5.1.dist-info/RECORD +0 -28
  75. {tnfr-4.5.1.dist-info → tnfr-4.5.2.dist-info}/WHEEL +0 -0
  76. {tnfr-4.5.1.dist-info → tnfr-4.5.2.dist-info}/entry_points.txt +0 -0
  77. {tnfr-4.5.1.dist-info → tnfr-4.5.2.dist-info}/licenses/LICENSE.md +0 -0
  78. {tnfr-4.5.1.dist-info → tnfr-4.5.2.dist-info}/top_level.txt +0 -0
tnfr/metrics/common.py ADDED
@@ -0,0 +1,151 @@
+ """Shared helpers for TNFR metrics."""
+
+ from __future__ import annotations
+
+ from types import MappingProxyType
+ from typing import Any, Iterable, Mapping, Sequence
+
+ from ..alias import collect_attr, get_attr, multi_recompute_abs_max
+ from ..collections_utils import normalize_weights
+ from ..constants import DEFAULTS, get_aliases
+ from ..cache import edge_version_cache
+ from ..helpers.numeric import clamp01, kahan_sum_nd
+ from ..import_utils import get_numpy
+ from ..types import GraphLike
+
+ ALIAS_DNFR = get_aliases("DNFR")
+ ALIAS_D2EPI = get_aliases("D2EPI")
+ ALIAS_DEPI = get_aliases("DEPI")
+ ALIAS_VF = get_aliases("VF")
+
+ __all__ = (
+     "GraphLike",
+     "compute_coherence",
+     "compute_dnfr_accel_max",
+     "normalize_dnfr",
+     "ensure_neighbors_map",
+     "merge_graph_weights",
+     "merge_and_normalize_weights",
+     "min_max_range",
+     "_get_vf_dnfr_max",
+ )
+
+
+ def compute_coherence(
+     G: GraphLike, *, return_means: bool = False
+ ) -> float | tuple[float, float, float]:
+     """Compute global coherence ``C`` from ``ΔNFR`` and ``dEPI``."""
+
+     count = G.number_of_nodes()
+     if count == 0:
+         return (0.0, 0.0, 0.0) if return_means else 0.0
+
+     nodes = G.nodes
+     np = get_numpy()
+     dnfr_values = collect_attr(G, nodes, ALIAS_DNFR, 0.0, np=np)
+     depi_values = collect_attr(G, nodes, ALIAS_DEPI, 0.0, np=np)
+
+     if np is not None:
+         dnfr_mean = float(np.mean(np.abs(dnfr_values)))
+         depi_mean = float(np.mean(np.abs(depi_values)))
+     else:
+         dnfr_sum, depi_sum = kahan_sum_nd(
+             ((abs(d), abs(e)) for d, e in zip(dnfr_values, depi_values)),
+             dims=2,
+         )
+         dnfr_mean = dnfr_sum / count
+         depi_mean = depi_sum / count
+
+     coherence = 1.0 / (1.0 + dnfr_mean + depi_mean)
+     return (coherence, dnfr_mean, depi_mean) if return_means else coherence
+
+
+ def ensure_neighbors_map(G: GraphLike) -> Mapping[Any, Sequence[Any]]:
+     """Return cached neighbors list keyed by node as a read-only mapping."""
+
+     def builder() -> Mapping[Any, Sequence[Any]]:
+         return MappingProxyType({n: tuple(G.neighbors(n)) for n in G})
+
+     return edge_version_cache(G, "_neighbors", builder)
+
+
+ def merge_graph_weights(G: GraphLike, key: str) -> dict[str, float]:
+     """Merge default weights for ``key`` with any graph overrides."""
+
+     return {**DEFAULTS[key], **G.graph.get(key, {})}
+
+
+ def merge_and_normalize_weights(
+     G: GraphLike,
+     key: str,
+     fields: Sequence[str],
+     *,
+     default: float = 0.0,
+ ) -> dict[str, float]:
+     """Merge defaults for ``key`` and normalise ``fields``."""
+
+     w = merge_graph_weights(G, key)
+     return normalize_weights(
+         w,
+         fields,
+         default=default,
+         error_on_conversion=False,
+         error_on_negative=False,
+         warn_once=True,
+     )
+
+
+ def compute_dnfr_accel_max(G: GraphLike) -> dict[str, float]:
+     """Compute absolute maxima of |ΔNFR| and |d²EPI/dt²|."""
+
+     return multi_recompute_abs_max(
+         G, {"dnfr_max": ALIAS_DNFR, "accel_max": ALIAS_D2EPI}
+     )
+
+
+ def normalize_dnfr(nd: Mapping[str, Any], max_val: float) -> float:
+     """Normalise ``|ΔNFR|`` using ``max_val``."""
+
+     if max_val <= 0:
+         return 0.0
+     val = abs(get_attr(nd, ALIAS_DNFR, 0.0))
+     return clamp01(val / max_val)
+
+
+ def min_max_range(
+     values: Iterable[float], *, default: tuple[float, float] = (0.0, 0.0)
+ ) -> tuple[float, float]:
+     """Return the minimum and maximum values observed in ``values``."""
+
+     it = iter(values)
+     try:
+         first = next(it)
+     except StopIteration:
+         return default
+     min_val = max_val = first
+     for val in it:
+         if val < min_val:
+             min_val = val
+         elif val > max_val:
+             max_val = val
+     return min_val, max_val
+
+
+ def _get_vf_dnfr_max(G: GraphLike) -> tuple[float, float]:
+     """Ensure and return absolute maxima for ``νf`` and ``ΔNFR``."""
+
+     vfmax = G.graph.get("_vfmax")
+     dnfrmax = G.graph.get("_dnfrmax")
+     if vfmax is None or dnfrmax is None:
+         maxes = multi_recompute_abs_max(
+             G, {"_vfmax": ALIAS_VF, "_dnfrmax": ALIAS_DNFR}
+         )
+         if vfmax is None:
+             vfmax = maxes["_vfmax"]
+         if dnfrmax is None:
+             dnfrmax = maxes["_dnfrmax"]
+         G.graph["_vfmax"] = vfmax
+         G.graph["_dnfrmax"] = dnfrmax
+     vfmax = 1.0 if vfmax == 0 else vfmax
+     dnfrmax = 1.0 if dnfrmax == 0 else dnfrmax
+     return float(vfmax), float(dnfrmax)
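
Note: the module's coherence formula reduces to C = 1 / (1 + mean|ΔNFR| + mean|dEPI|), so a fully relaxed graph yields C = 1 and growing tension drives C toward 0. A minimal sketch of that arithmetic on a toy networkx graph, assuming plain float node attributes keyed "dnfr" and "depi" (the real module resolves attribute names through get_aliases, so these keys are illustrative only):

# Hedged sketch: recomputes the coherence formula by hand.
# "dnfr"/"depi" are assumed attribute names, not tnfr's real alias keys.
import networkx as nx

G = nx.Graph()
G.add_node(1, dnfr=0.2, depi=0.1)
G.add_node(2, dnfr=-0.4, depi=0.0)

count = G.number_of_nodes()
dnfr_mean = sum(abs(d.get("dnfr", 0.0)) for _, d in G.nodes(data=True)) / count
depi_mean = sum(abs(d.get("depi", 0.0)) for _, d in G.nodes(data=True)) / count
coherence = 1.0 / (1.0 + dnfr_mean + depi_mean)  # 1 / (1 + 0.3 + 0.05) ≈ 0.74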
tnfr/metrics/core.py ADDED
@@ -0,0 +1,101 @@
+ """Basic metrics orchestrator."""
+
+ from __future__ import annotations
+
+ from typing import Any
+
+ from ..callback_utils import CallbackEvent, callback_manager
+ from ..constants import get_param
+ from ..glyph_history import append_metric, ensure_history
+ from ..logging_utils import get_logger
+ from .coherence import (
+     _aggregate_si,
+     _track_stability,
+     _update_coherence,
+     _update_phase_sync,
+     _update_sigma,
+     register_coherence_callbacks,
+ )
+ from .diagnosis import register_diagnosis_callbacks
+ from .glyph_timing import _compute_advanced_metrics
+ from .reporting import (
+     Tg_by_node,
+     Tg_global,
+     glyphogram_series,
+     glyph_top,
+     latency_series,
+ )
+
+ logger = get_logger(__name__)
+
+ __all__ = [
+     "_metrics_step",
+     "register_metrics_callbacks",
+     "Tg_global",
+     "Tg_by_node",
+     "latency_series",
+     "glyphogram_series",
+     "glyph_top",
+ ]
+
+
+ def _metrics_step(G, ctx: dict[str, Any] | None = None):
+     """Update operational TNFR metrics per step."""
+
+     del ctx
+
+     cfg = get_param(G, "METRICS")
+     if not cfg.get("enabled", True):
+         return
+
+     hist = ensure_history(G)
+     metrics_sentinel_key = "_metrics_history_id"
+     history_id = id(hist)
+     if G.graph.get(metrics_sentinel_key) != history_id:
+         for k in (
+             "C_steps",
+             "stable_frac",
+             "phase_sync",
+             "glyph_load_estab",
+             "glyph_load_disr",
+             "Si_mean",
+             "Si_hi_frac",
+             "Si_lo_frac",
+             "delta_Si",
+             "B",
+         ):
+             hist.setdefault(k, [])
+         G.graph[metrics_sentinel_key] = history_id
+
+     dt = float(get_param(G, "DT"))
+     eps_dnfr = float(get_param(G, "EPS_DNFR_STABLE"))
+     eps_depi = float(get_param(G, "EPS_DEPI_STABLE"))
+     t = float(G.graph.get("_t", 0.0))
+
+     _update_coherence(G, hist)
+     _track_stability(G, hist, dt, eps_dnfr, eps_depi)
+     try:
+         _update_phase_sync(G, hist)
+         _update_sigma(G, hist)
+         if hist.get("C_steps") and hist.get("stable_frac"):
+             append_metric(
+                 hist,
+                 "iota",
+                 hist["C_steps"][-1] * hist["stable_frac"][-1],
+             )
+     except (KeyError, AttributeError, TypeError) as exc:
+         logger.debug("observer update failed: %s", exc)
+
+     _aggregate_si(G, hist)
+     _compute_advanced_metrics(G, hist, t, dt, cfg)
+
+
+ def register_metrics_callbacks(G) -> None:
+     callback_manager.register_callback(
+         G,
+         event=CallbackEvent.AFTER_STEP.value,
+         func=_metrics_step,
+         name="metrics_step",
+     )
+     register_coherence_callbacks(G)
+     register_diagnosis_callbacks(G)
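
Note: _metrics_step guards its history seeding with a sentinel keyed to id(hist), so series lists are (re)seeded only when the history object itself is replaced and repeated steps never clobber accumulated metrics. A self-contained sketch of that idempotent-seeding pattern (names here are illustrative, not tnfr's API):

# Hedged sketch of the sentinel pattern used by _metrics_step.
def seed_history(graph_meta: dict, hist: dict,
                 keys=("C_steps", "stable_frac")) -> None:
    sentinel = "_metrics_history_id"
    if graph_meta.get(sentinel) != id(hist):
        for k in keys:
            hist.setdefault(k, [])  # never overwrites existing series
        graph_meta[sentinel] = id(hist)

meta, hist = {}, {}
seed_history(meta, hist)
hist["C_steps"].append(0.9)
seed_history(meta, hist)           # no-op: same history object
assert hist["C_steps"] == [0.9]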
tnfr/metrics/diagnosis.py ADDED
@@ -0,0 +1,234 @@
+ """Diagnostic metrics."""
+
+ from __future__ import annotations
+
+ from statistics import fmean, StatisticsError
+ from operator import ge, le
+ from typing import Any
+
+ from ..constants import (
+     VF_KEY,
+     get_aliases,
+     get_param,
+ )
+ from ..callback_utils import CallbackEvent, callback_manager
+ from ..glyph_history import ensure_history, append_metric
+ from ..alias import get_attr
+ from ..helpers.numeric import clamp01, similarity_abs
+ from .common import compute_dnfr_accel_max, min_max_range, normalize_dnfr
+ from .coherence import (
+     local_phase_sync,
+     local_phase_sync_weighted,
+ )
+
+ ALIAS_EPI = get_aliases("EPI")
+ ALIAS_VF = get_aliases("VF")
+ ALIAS_SI = get_aliases("SI")
+
+
+ def _symmetry_index(
+     G, n, epi_min: float | None = None, epi_max: float | None = None
+ ):
+     """Compute the symmetry index for node ``n`` based on EPI values."""
+     nd = G.nodes[n]
+     epi_i = get_attr(nd, ALIAS_EPI, 0.0)
+     vec = G.neighbors(n)
+     try:
+         epi_bar = fmean(get_attr(G.nodes[v], ALIAS_EPI, epi_i) for v in vec)
+     except StatisticsError:
+         return 1.0
+     if epi_min is None or epi_max is None:
+         epi_iter = (get_attr(G.nodes[v], ALIAS_EPI, 0.0) for v in G.nodes())
+         epi_min, epi_max = min_max_range(epi_iter, default=(0.0, 1.0))
+     return similarity_abs(epi_i, epi_bar, epi_min, epi_max)
+
+
+ def _state_from_thresholds(Rloc, dnfr_n, cfg):
+     stb = cfg.get("stable", {"Rloc_hi": 0.8, "dnfr_lo": 0.2, "persist": 3})
+     dsr = cfg.get("dissonance", {"Rloc_lo": 0.4, "dnfr_hi": 0.5, "persist": 3})
+
+     stable_checks = {
+         "Rloc": (Rloc, float(stb["Rloc_hi"]), ge),
+         "dnfr": (dnfr_n, float(stb["dnfr_lo"]), le),
+     }
+     if all(comp(val, thr) for val, thr, comp in stable_checks.values()):
+         return "estable"
+
+     dissonant_checks = {
+         "Rloc": (Rloc, float(dsr["Rloc_lo"]), le),
+         "dnfr": (dnfr_n, float(dsr["dnfr_hi"]), ge),
+     }
+     if all(comp(val, thr) for val, thr, comp in dissonant_checks.values()):
+         return "disonante"
+
+     return "transicion"
+
+
+ def _recommendation(state, cfg):
+     adv = cfg.get("advice", {})
+     key = {
+         "estable": "stable",
+         "transicion": "transition",
+         "disonante": "dissonant",
+     }[state]
+     return list(adv.get(key, []))
+
+
+ def _get_last_weights(G, hist):
+     """Return last Wi and Wm matrices from history."""
+     CfgW = get_param(G, "COHERENCE")
+     Wkey = CfgW.get("Wi_history_key", "W_i")
+     Wm_key = CfgW.get("history_key", "W_sparse")
+     Wi_series = hist.get(Wkey, [])
+     Wm_series = hist.get(Wm_key, [])
+     Wi_last = Wi_series[-1] if Wi_series else None
+     Wm_last = Wm_series[-1] if Wm_series else None
+     return Wi_last, Wm_last
+
+
+ def _node_diagnostics(
+     G,
+     n,
+     i,
+     nodes,
+     node_to_index,
+     Wi_last,
+     Wm_last,
+     epi_min,
+     epi_max,
+     dnfr_max,
+     dcfg,
+ ):
+     nd = G.nodes[n]
+     Si = clamp01(get_attr(nd, ALIAS_SI, 0.0))
+     EPI = get_attr(nd, ALIAS_EPI, 0.0)
+     vf = get_attr(nd, ALIAS_VF, 0.0)
+     dnfr_n = normalize_dnfr(nd, dnfr_max)
+
+     if Wm_last is not None:
+         if Wm_last and isinstance(Wm_last[0], list):
+             row = Wm_last[i]
+         else:
+             row = Wm_last
+         Rloc = local_phase_sync_weighted(
+             G, n, nodes_order=nodes, W_row=row, node_to_index=node_to_index
+         )
+     else:
+         Rloc = local_phase_sync(G, n)
+
+     symm = (
+         _symmetry_index(G, n, epi_min=epi_min, epi_max=epi_max)
+         if dcfg.get("compute_symmetry", True)
+         else None
+     )
+     state = _state_from_thresholds(Rloc, dnfr_n, dcfg)
+
+     alerts = []
+     if state == "disonante" and dnfr_n >= float(
+         dcfg.get("dissonance", {}).get("dnfr_hi", 0.5)
+     ):
+         alerts.append("high structural tension")
+
+     advice = _recommendation(state, dcfg)
+
+     return {
+         "node": n,
+         "Si": Si,
+         "EPI": EPI,
+         VF_KEY: vf,
+         "dnfr_norm": dnfr_n,
+         "W_i": (Wi_last[i] if (Wi_last and i < len(Wi_last)) else None),
+         "R_local": Rloc,
+         "symmetry": symm,
+         "state": state,
+         "advice": advice,
+         "alerts": alerts,
+     }
+
+
+ def _diagnosis_step(G, ctx: dict[str, Any] | None = None):
+     del ctx
+
+     dcfg = get_param(G, "DIAGNOSIS")
+     if not dcfg.get("enabled", True):
+         return
+
+     hist = ensure_history(G)
+     key = dcfg.get("history_key", "nodal_diag")
+
+     norms = compute_dnfr_accel_max(G)
+     G.graph["_sel_norms"] = norms
+     dnfr_max = float(norms.get("dnfr_max", 1.0)) or 1.0
+     epi_iter = (get_attr(nd, ALIAS_EPI, 0.0) for _, nd in G.nodes(data=True))
+     epi_min, epi_max = min_max_range(epi_iter, default=(0.0, 1.0))
+
+     Wi_last, Wm_last = _get_last_weights(G, hist)
+
+     nodes = list(G.nodes())
+     node_to_index = {v: i for i, v in enumerate(nodes)}
+     diag = {}
+     for i, n in enumerate(nodes):
+         diag[n] = _node_diagnostics(
+             G,
+             n,
+             i,
+             nodes,
+             node_to_index,
+             Wi_last,
+             Wm_last,
+             epi_min,
+             epi_max,
+             dnfr_max,
+             dcfg,
+         )
+
+     append_metric(hist, key, diag)
+
+
+ def dissonance_events(G, ctx: dict[str, Any] | None = None):
+     """Emit per-node structural dissonance start/end events.
+
+     Events are recorded as ``"dissonance_start"`` and ``"dissonance_end"``.
+     """
+
+     del ctx
+
+     hist = ensure_history(G)
+     # dissonance events are recorded in ``history['events']``
+     norms = G.graph.get("_sel_norms", {})
+     dnfr_max = float(norms.get("dnfr_max", 1.0)) or 1.0
+     step_idx = len(hist.get("C_steps", []))
+     nodes = list(G.nodes())
+     for n in nodes:
+         nd = G.nodes[n]
+         dn = normalize_dnfr(nd, dnfr_max)
+         Rloc = local_phase_sync(G, n)
+         st = bool(nd.get("_disr_state", False))
+         if (not st) and dn >= 0.5 and Rloc <= 0.4:
+             nd["_disr_state"] = True
+             append_metric(
+                 hist,
+                 "events",
+                 ("dissonance_start", {"node": n, "step": step_idx}),
+             )
+         elif st and dn <= 0.2 and Rloc >= 0.7:
+             nd["_disr_state"] = False
+             append_metric(
+                 hist,
+                 "events",
+                 ("dissonance_end", {"node": n, "step": step_idx}),
+             )
+
+
+ def register_diagnosis_callbacks(G) -> None:
+     callback_manager.register_callback(
+         G,
+         event=CallbackEvent.AFTER_STEP.value,
+         func=_diagnosis_step,
+         name="diagnosis_step",
+     )
+     callback_manager.register_callback(
+         G,
+         event=CallbackEvent.AFTER_STEP.value,
+         func=dissonance_events,
+         name="dissonance_events",
+     )
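
Note: dissonance_events uses hysteresis: a node enters the dissonant state at ΔNFR_norm ≥ 0.5 with R_local ≤ 0.4, but only exits once ΔNFR_norm ≤ 0.2 and R_local ≥ 0.7, so values hovering near a single threshold do not emit start/end pairs every step. A standalone sketch of that state machine (function name and signature are illustrative):

# Hedged sketch of the entry/exit hysteresis in dissonance_events.
def update_dissonance(state, dn, r_loc):
    if not state and dn >= 0.5 and r_loc <= 0.4:
        return True, "dissonance_start"
    if state and dn <= 0.2 and r_loc >= 0.7:
        return False, "dissonance_end"
    return state, None  # hold current state between the two bands

state = False
for dn, r in [(0.6, 0.3), (0.45, 0.5), (0.1, 0.8)]:
    state, event = update_dissonance(state, dn, r)
    # emits: dissonance_start, then None (held), then dissonance_end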
tnfr/metrics/export.py ADDED
@@ -0,0 +1,137 @@
+ """Metrics export."""
+
+ from __future__ import annotations
+
+ import csv
+ import math
+ from itertools import zip_longest, tee
+
+ from ..glyph_history import ensure_history
+ from ..io import safe_write
+ from ..constants_glyphs import GLYPHS_CANONICAL
+ from .core import glyphogram_series
+ from ..json_utils import json_dumps
+
+
+ def _write_csv(path, headers, rows):
+     def _write(f):
+         writer = csv.writer(f)
+         writer.writerow(headers)
+         for row in rows:
+             writer.writerow(row)
+
+     safe_write(path, _write, newline="")
+
+
+ def _iter_glif_rows(glyph):
+     ts = glyph.get("t", [])
+     # Precompute columns for each glyph to avoid repeated lookups.
+     # ``default_col`` is shared by reference for missing glyphs to prevent
+     # unnecessary list allocations.
+     default_col = [0] * len(ts)
+     cols = [glyph.get(g, default_col) for g in GLYPHS_CANONICAL]
+     for i, t in enumerate(ts):
+         yield [t] + [col[i] for col in cols]
+
+
+ def export_metrics(G, base_path: str, fmt: str = "csv") -> None:
+     """Dump glyphogram and σ(t) trace to compact CSV or JSON files."""
+     hist = ensure_history(G)
+     glyph = glyphogram_series(G)
+     sigma_x = hist.get("sense_sigma_x", [])
+     sigma_y = hist.get("sense_sigma_y", [])
+     sigma_mag = hist.get("sense_sigma_mag", [])
+     sigma_angle = hist.get("sense_sigma_angle", [])
+     t_series = hist.get("sense_sigma_t", []) or glyph.get("t", [])
+     rows_raw = zip_longest(
+         t_series, sigma_x, sigma_y, sigma_mag, sigma_angle, fillvalue=None
+     )
+
+     def _clean(value: float | None) -> float:
+         """Return ``0`` for ``None`` or ``NaN`` values."""
+         if value is None or (isinstance(value, float) and math.isnan(value)):
+             return 0
+         return value
+
+     def _gen_rows():
+         for i, (t, x, y, m, a) in enumerate(rows_raw):
+             yield (
+                 i if t is None else t,
+                 _clean(x),
+                 _clean(y),
+                 _clean(m),
+                 _clean(a),
+             )
+
+     rows_csv, rows_sigma = tee(_gen_rows())
+
+     sigma: dict[str, list[float]] = {
+         "t": [],
+         "sigma_x": [],
+         "sigma_y": [],
+         "mag": [],
+         "angle": [],
+     }
+     for t, x, y, m, a in rows_sigma:
+         sigma["t"].append(t)
+         sigma["sigma_x"].append(x)
+         sigma["sigma_y"].append(y)
+         sigma["mag"].append(m)
+         sigma["angle"].append(a)
+     morph = hist.get("morph", [])
+     epi_supp = hist.get("EPI_support", [])
+     fmt = fmt.lower()
+     if fmt not in {"csv", "json"}:
+         raise ValueError(f"Unsupported export format: {fmt}")
+     if fmt == "csv":
+         specs = [
+             (
+                 "_glyphogram.csv",
+                 ["t", *GLYPHS_CANONICAL],
+                 _iter_glif_rows(glyph),
+             ),
+             (
+                 "_sigma.csv",
+                 ["t", "x", "y", "mag", "angle"],
+                 ([t, x, y, m, a] for t, x, y, m, a in rows_csv),
+             ),
+         ]
+         if morph:
+             specs.append(
+                 (
+                     "_morph.csv",
+                     ["t", "ID", "CM", "NE", "PP"],
+                     (
+                         [
+                             row.get("t"),
+                             row.get("ID"),
+                             row.get("CM"),
+                             row.get("NE"),
+                             row.get("PP"),
+                         ]
+                         for row in morph
+                     ),
+                 )
+             )
+         if epi_supp:
+             specs.append(
+                 (
+                     "_epi_support.csv",
+                     ["t", "size", "epi_norm"],
+                     (
+                         [row.get("t"), row.get("size"), row.get("epi_norm")]
+                         for row in epi_supp
+                     ),
+                 )
+             )
+         for suffix, headers, rows in specs:
+             _write_csv(base_path + suffix, headers, rows)
+     else:
+         data = {
+             "glyphogram": glyph,
+             "sigma": sigma,
+             "morph": morph,
+             "epi_support": epi_supp,
+         }
+         json_path = base_path + ".json"
+         safe_write(json_path, lambda f: f.write(json_dumps(data)))
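
Note: export_metrics derives every output name from base_path: CSV mode writes base_path + "_glyphogram.csv" and "_sigma.csv", plus "_morph.csv" and "_epi_support.csv" when those histories are non-empty; JSON mode writes a single base_path + ".json" with the four top-level keys. A hedged usage sketch, assuming a graph G that already carries history (e.g. after register_metrics_callbacks(G) and a few simulation steps):

# Hedged usage sketch; G is assumed to be a populated tnfr graph.
from tnfr.metrics.export import export_metrics

export_metrics(G, "run01", fmt="csv")   # run01_glyphogram.csv, run01_sigma.csv, ...
export_metrics(G, "run01", fmt="json")  # run01.json: glyphogram/sigma/morph/epi_support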