tnfr 4.5.1__py3-none-any.whl → 6.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (170) hide show
  1. tnfr/__init__.py +270 -90
  2. tnfr/__init__.pyi +40 -0
  3. tnfr/_compat.py +11 -0
  4. tnfr/_version.py +7 -0
  5. tnfr/_version.pyi +7 -0
  6. tnfr/alias.py +631 -0
  7. tnfr/alias.pyi +140 -0
  8. tnfr/cache.py +732 -0
  9. tnfr/cache.pyi +232 -0
  10. tnfr/callback_utils.py +381 -0
  11. tnfr/callback_utils.pyi +105 -0
  12. tnfr/cli/__init__.py +89 -0
  13. tnfr/cli/__init__.pyi +47 -0
  14. tnfr/cli/arguments.py +199 -0
  15. tnfr/cli/arguments.pyi +33 -0
  16. tnfr/cli/execution.py +322 -0
  17. tnfr/cli/execution.pyi +80 -0
  18. tnfr/cli/utils.py +34 -0
  19. tnfr/cli/utils.pyi +8 -0
  20. tnfr/config/__init__.py +12 -0
  21. tnfr/config/__init__.pyi +8 -0
  22. tnfr/config/constants.py +104 -0
  23. tnfr/config/constants.pyi +12 -0
  24. tnfr/config/init.py +36 -0
  25. tnfr/config/init.pyi +8 -0
  26. tnfr/config/operator_names.py +106 -0
  27. tnfr/config/operator_names.pyi +28 -0
  28. tnfr/config/presets.py +104 -0
  29. tnfr/config/presets.pyi +7 -0
  30. tnfr/constants/__init__.py +228 -0
  31. tnfr/constants/__init__.pyi +104 -0
  32. tnfr/constants/core.py +158 -0
  33. tnfr/constants/core.pyi +17 -0
  34. tnfr/constants/init.py +31 -0
  35. tnfr/constants/init.pyi +12 -0
  36. tnfr/constants/metric.py +102 -0
  37. tnfr/constants/metric.pyi +19 -0
  38. tnfr/constants_glyphs.py +16 -0
  39. tnfr/constants_glyphs.pyi +12 -0
  40. tnfr/dynamics/__init__.py +136 -0
  41. tnfr/dynamics/__init__.pyi +83 -0
  42. tnfr/dynamics/adaptation.py +201 -0
  43. tnfr/dynamics/aliases.py +22 -0
  44. tnfr/dynamics/coordination.py +343 -0
  45. tnfr/dynamics/dnfr.py +2315 -0
  46. tnfr/dynamics/dnfr.pyi +33 -0
  47. tnfr/dynamics/integrators.py +561 -0
  48. tnfr/dynamics/integrators.pyi +35 -0
  49. tnfr/dynamics/runtime.py +521 -0
  50. tnfr/dynamics/sampling.py +34 -0
  51. tnfr/dynamics/sampling.pyi +7 -0
  52. tnfr/dynamics/selectors.py +680 -0
  53. tnfr/execution.py +216 -0
  54. tnfr/execution.pyi +65 -0
  55. tnfr/flatten.py +283 -0
  56. tnfr/flatten.pyi +28 -0
  57. tnfr/gamma.py +320 -89
  58. tnfr/gamma.pyi +40 -0
  59. tnfr/glyph_history.py +337 -0
  60. tnfr/glyph_history.pyi +53 -0
  61. tnfr/grammar.py +23 -153
  62. tnfr/grammar.pyi +13 -0
  63. tnfr/helpers/__init__.py +151 -0
  64. tnfr/helpers/__init__.pyi +66 -0
  65. tnfr/helpers/numeric.py +88 -0
  66. tnfr/helpers/numeric.pyi +12 -0
  67. tnfr/immutable.py +214 -0
  68. tnfr/immutable.pyi +37 -0
  69. tnfr/initialization.py +199 -0
  70. tnfr/initialization.pyi +73 -0
  71. tnfr/io.py +311 -0
  72. tnfr/io.pyi +11 -0
  73. tnfr/locking.py +37 -0
  74. tnfr/locking.pyi +7 -0
  75. tnfr/metrics/__init__.py +41 -0
  76. tnfr/metrics/__init__.pyi +20 -0
  77. tnfr/metrics/coherence.py +1469 -0
  78. tnfr/metrics/common.py +149 -0
  79. tnfr/metrics/common.pyi +15 -0
  80. tnfr/metrics/core.py +259 -0
  81. tnfr/metrics/core.pyi +13 -0
  82. tnfr/metrics/diagnosis.py +840 -0
  83. tnfr/metrics/diagnosis.pyi +89 -0
  84. tnfr/metrics/export.py +151 -0
  85. tnfr/metrics/glyph_timing.py +369 -0
  86. tnfr/metrics/reporting.py +152 -0
  87. tnfr/metrics/reporting.pyi +12 -0
  88. tnfr/metrics/sense_index.py +294 -0
  89. tnfr/metrics/sense_index.pyi +9 -0
  90. tnfr/metrics/trig.py +216 -0
  91. tnfr/metrics/trig.pyi +12 -0
  92. tnfr/metrics/trig_cache.py +105 -0
  93. tnfr/metrics/trig_cache.pyi +10 -0
  94. tnfr/node.py +255 -177
  95. tnfr/node.pyi +161 -0
  96. tnfr/observers.py +154 -150
  97. tnfr/observers.pyi +46 -0
  98. tnfr/ontosim.py +135 -134
  99. tnfr/ontosim.pyi +33 -0
  100. tnfr/operators/__init__.py +452 -0
  101. tnfr/operators/__init__.pyi +31 -0
  102. tnfr/operators/definitions.py +181 -0
  103. tnfr/operators/definitions.pyi +92 -0
  104. tnfr/operators/jitter.py +266 -0
  105. tnfr/operators/jitter.pyi +11 -0
  106. tnfr/operators/registry.py +80 -0
  107. tnfr/operators/registry.pyi +15 -0
  108. tnfr/operators/remesh.py +569 -0
  109. tnfr/presets.py +10 -23
  110. tnfr/presets.pyi +7 -0
  111. tnfr/py.typed +0 -0
  112. tnfr/rng.py +440 -0
  113. tnfr/rng.pyi +14 -0
  114. tnfr/selector.py +217 -0
  115. tnfr/selector.pyi +19 -0
  116. tnfr/sense.py +307 -142
  117. tnfr/sense.pyi +30 -0
  118. tnfr/structural.py +69 -164
  119. tnfr/structural.pyi +46 -0
  120. tnfr/telemetry/__init__.py +13 -0
  121. tnfr/telemetry/verbosity.py +37 -0
  122. tnfr/tokens.py +61 -0
  123. tnfr/tokens.pyi +41 -0
  124. tnfr/trace.py +520 -95
  125. tnfr/trace.pyi +68 -0
  126. tnfr/types.py +382 -17
  127. tnfr/types.pyi +145 -0
  128. tnfr/utils/__init__.py +158 -0
  129. tnfr/utils/__init__.pyi +133 -0
  130. tnfr/utils/cache.py +755 -0
  131. tnfr/utils/cache.pyi +156 -0
  132. tnfr/utils/data.py +267 -0
  133. tnfr/utils/data.pyi +73 -0
  134. tnfr/utils/graph.py +87 -0
  135. tnfr/utils/graph.pyi +10 -0
  136. tnfr/utils/init.py +746 -0
  137. tnfr/utils/init.pyi +85 -0
  138. tnfr/utils/io.py +157 -0
  139. tnfr/utils/io.pyi +10 -0
  140. tnfr/utils/validators.py +130 -0
  141. tnfr/utils/validators.pyi +19 -0
  142. tnfr/validation/__init__.py +25 -0
  143. tnfr/validation/__init__.pyi +17 -0
  144. tnfr/validation/compatibility.py +59 -0
  145. tnfr/validation/compatibility.pyi +8 -0
  146. tnfr/validation/grammar.py +149 -0
  147. tnfr/validation/grammar.pyi +11 -0
  148. tnfr/validation/rules.py +194 -0
  149. tnfr/validation/rules.pyi +18 -0
  150. tnfr/validation/syntax.py +151 -0
  151. tnfr/validation/syntax.pyi +7 -0
  152. tnfr-6.0.0.dist-info/METADATA +135 -0
  153. tnfr-6.0.0.dist-info/RECORD +157 -0
  154. tnfr/cli.py +0 -322
  155. tnfr/config.py +0 -41
  156. tnfr/constants.py +0 -277
  157. tnfr/dynamics.py +0 -814
  158. tnfr/helpers.py +0 -264
  159. tnfr/main.py +0 -47
  160. tnfr/metrics.py +0 -597
  161. tnfr/operators.py +0 -525
  162. tnfr/program.py +0 -176
  163. tnfr/scenarios.py +0 -34
  164. tnfr/validators.py +0 -38
  165. tnfr-4.5.1.dist-info/METADATA +0 -221
  166. tnfr-4.5.1.dist-info/RECORD +0 -28
  167. {tnfr-4.5.1.dist-info → tnfr-6.0.0.dist-info}/WHEEL +0 -0
  168. {tnfr-4.5.1.dist-info → tnfr-6.0.0.dist-info}/entry_points.txt +0 -0
  169. {tnfr-4.5.1.dist-info → tnfr-6.0.0.dist-info}/licenses/LICENSE.md +0 -0
  170. {tnfr-4.5.1.dist-info → tnfr-6.0.0.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,89 @@
1
+ from typing import Any, Mapping, Sequence
2
+
3
+ __all__: Any
4
+
5
+ def __getattr__(name: str) -> Any: ...
6
+
7
+ ALIAS_DNFR: Any
8
+ ALIAS_EPI: Any
9
+ ALIAS_SI: Any
10
+ ALIAS_VF: Any
11
+ CallbackEvent: Any
12
+ CoherenceMatrixPayload: Any
13
+ Iterable: Any
14
+ ProcessPoolExecutor: Any
15
+ StatisticsError: Any
16
+ TNFRGraph: Any
17
+ TRANSITION: Any
18
+ VF_KEY: Any
19
+ annotations: Any
20
+ append_metric: Any
21
+ callback_manager: Any
22
+ clamp01: Any
23
+ coherence_matrix: Any
24
+ compute_dnfr_accel_max: Any
25
+ compute_theta_trig: Any
26
+ dissonance_events: Any
27
+ ensure_history: Any
28
+ fmean: Any
29
+ ge: Any
30
+ get_aliases: Any
31
+ get_attr: Any
32
+ get_numpy: Any
33
+ get_param: Any
34
+ get_trig_cache: Any
35
+ le: Any
36
+ local_phase_sync: Any
37
+ math: Any
38
+ min_max_range: Any
39
+ normalize_dnfr: Any
40
+ partial: Any
41
+ register_diagnosis_callbacks: Any
42
+ similarity_abs: Any
43
+
44
class RLocalWorkerArgs:
    """Argument bundle for the ``_rlocal_worker`` multiprocessing helper.

    Groups the per-chunk inputs into one picklable object so a single
    positional argument crosses the process boundary.
    """

    # Subset of nodes this worker processes.
    chunk: Sequence[Any]
    # All nodes participating in the coherence computation — presumably the
    # index space of ``weight_matrix``; confirm against the implementation.
    coherence_nodes: Sequence[Any]
    weight_matrix: Any
    # Node -> row/column index into ``weight_matrix``.
    weight_index: Mapping[Any, int]
    # Node -> tuple of neighbor node ids.
    neighbors_map: Mapping[Any, tuple[Any, ...]]
    # Node -> cos/sin of its phase (precomputed trigonometry).
    cos_map: Mapping[Any, float]
    sin_map: Mapping[Any, float]

    def __init__(
        self,
        chunk: Sequence[Any],
        coherence_nodes: Sequence[Any],
        weight_matrix: Any,
        weight_index: Mapping[Any, int],
        neighbors_map: Mapping[Any, tuple[Any, ...]],
        cos_map: Mapping[Any, float],
        sin_map: Mapping[Any, float],
    ) -> None: ...
63
+
64
+
65
class NeighborMeanWorkerArgs:
    """Argument bundle for the ``_neighbor_mean_worker`` helper.

    Picklable container carrying one chunk of nodes plus the lookup maps the
    worker needs to average neighbor EPI values.
    """

    # Subset of nodes this worker processes.
    chunk: Sequence[Any]
    # Node -> tuple of neighbor node ids.
    neighbors_map: Mapping[Any, tuple[Any, ...]]
    # Node -> EPI value used for neighbor averaging.
    epi_map: Mapping[Any, float]

    def __init__(
        self,
        chunk: Sequence[Any],
        neighbors_map: Mapping[Any, tuple[Any, ...]],
        epi_map: Mapping[Any, float],
    ) -> None: ...
76
+
77
+
78
+ def _rlocal_worker(args: RLocalWorkerArgs) -> list[float]: ...
79
+
80
+ def _neighbor_mean_worker(args: NeighborMeanWorkerArgs) -> list[float | None]: ...
81
+
82
+ def _state_from_thresholds(Rloc: float, dnfr_n: float, cfg: Mapping[str, Any]) -> str: ...
83
+
84
+ def _recommendation(state: str, cfg: Mapping[str, Any]) -> list[Any]: ...
85
+
86
+ def _get_last_weights(
87
+ G: TNFRGraph,
88
+ hist: Mapping[str, Sequence[CoherenceMatrixPayload | None]],
89
+ ) -> tuple[CoherenceMatrixPayload | None, CoherenceMatrixPayload | None]: ...
tnfr/metrics/export.py ADDED
@@ -0,0 +1,151 @@
1
+ """Metrics export."""
2
+
3
+ from __future__ import annotations
4
+
5
+ import csv
6
+ import math
7
+ from collections.abc import Iterable, Iterator, Sequence
8
+ from itertools import zip_longest, tee
9
+ from typing import Mapping, TextIO
10
+
11
+ from ..config.constants import GLYPHS_CANONICAL
12
+ from ..glyph_history import ensure_history
13
+ from ..io import safe_write
14
+ from ..utils import json_dumps
15
+ from ..types import Graph
16
+ from .core import glyphogram_series
17
+ from .glyph_timing import SigmaTrace
18
+
19
+
20
def _write_csv(
    path: str,
    headers: Sequence[str],
    rows: Iterable[Sequence[object]],
) -> None:
    """Write ``headers`` followed by ``rows`` to ``path`` as CSV.

    The actual file handling is delegated to :func:`safe_write`, which
    receives a callback taking the open text handle.
    """

    def _emit(handle: TextIO) -> None:
        out = csv.writer(handle)
        out.writerow(headers)
        out.writerows(rows)

    # ``newline=""`` is required by the csv module to avoid blank lines on
    # platforms that translate line endings.
    safe_write(path, _emit, newline="")
32
+
33
+
34
def _iter_glif_rows(
    glyph: Mapping[str, Sequence[float]],
) -> Iterator[list[float]]:
    """Yield one ``[t, value-per-canonical-glyph]`` row per timestamp."""
    timeline = glyph.get("t", [])
    # A single zero-filled column is shared by reference among all glyphs
    # missing from ``glyph``, avoiding one list allocation per absent glyph.
    zeros = [0] * len(timeline)
    columns = [glyph.get(name, zeros) for name in GLYPHS_CANONICAL]
    for idx, t in enumerate(timeline):
        row: list[float] = [t]
        row.extend(column[idx] for column in columns)
        yield row
45
+
46
+
47
def export_metrics(G: Graph, base_path: str, fmt: str = "csv") -> None:
    """Dump glyphogram and σ(t) trace to compact CSV or JSON files.

    Parameters
    ----------
    G:
        Graph whose history (via ``ensure_history``) holds the series.
    base_path:
        Path prefix; ``_glyphogram.csv``/``_sigma.csv``/… or ``.json`` is
        appended depending on ``fmt``.
    fmt:
        ``"csv"`` or ``"json"`` (case-insensitive).

    Raises
    ------
    ValueError
        If ``fmt`` is not a supported format.
    """
    hist = ensure_history(G)
    glyph = glyphogram_series(G)
    sigma_x = hist.get("sense_sigma_x", [])
    sigma_y = hist.get("sense_sigma_y", [])
    sigma_mag = hist.get("sense_sigma_mag", [])
    sigma_angle = hist.get("sense_sigma_angle", [])
    # Fall back to the glyphogram timeline when no sigma timestamps exist.
    t_series = hist.get("sense_sigma_t", []) or glyph.get("t", [])
    # zip_longest pads shorter series with None so every row has 5 slots.
    rows_raw = zip_longest(
        t_series, sigma_x, sigma_y, sigma_mag, sigma_angle, fillvalue=None
    )

    def _clean(value: float | None) -> float:
        """Return ``0`` for ``None`` or ``NaN`` values."""
        if value is None or (isinstance(value, float) and math.isnan(value)):
            return 0
        return value

    def _gen_rows() -> Iterator[tuple[float, float, float, float, float]]:
        # Missing timestamps are replaced by the row index ``i``.
        for i, (t, x, y, m, a) in enumerate(rows_raw):
            yield (
                i if t is None else t,
                _clean(x),
                _clean(y),
                _clean(m),
                _clean(a),
            )

    # Two consumers share one pass over the rows: the eager sigma-dict loop
    # below and (CSV path only) the generator inside ``specs``. tee buffers
    # items consumed by one branch until the other catches up.
    rows_csv, rows_sigma = tee(_gen_rows())

    sigma: SigmaTrace = {
        "t": [],
        "sigma_x": [],
        "sigma_y": [],
        "mag": [],
        "angle": [],
    }
    for t, x, y, m, a in rows_sigma:
        sigma["t"].append(t)
        sigma["sigma_x"].append(x)
        sigma["sigma_y"].append(y)
        sigma["mag"].append(m)
        sigma["angle"].append(a)
    morph: Sequence[Mapping[str, float]] = hist.get("morph", [])
    epi_supp: Sequence[Mapping[str, float]] = hist.get("EPI_support", [])
    fmt = fmt.lower()
    if fmt not in {"csv", "json"}:
        raise ValueError(f"Unsupported export format: {fmt}")
    if fmt == "csv":
        # Each spec is (file suffix, header row, lazily-evaluated rows).
        specs: list[tuple[str, Sequence[str], Iterable[Sequence[object]]]] = [
            (
                "_glyphogram.csv",
                ["t", *GLYPHS_CANONICAL],
                _iter_glif_rows(glyph),
            ),
            (
                "_sigma.csv",
                ["t", "x", "y", "mag", "angle"],
                ([t, x, y, m, a] for t, x, y, m, a in rows_csv),
            ),
        ]
        # Optional series are only exported when history actually has them.
        if morph:
            specs.append(
                (
                    "_morph.csv",
                    ["t", "ID", "CM", "NE", "PP"],
                    (
                        [
                            row.get("t"),
                            row.get("ID"),
                            row.get("CM"),
                            row.get("NE"),
                            row.get("PP"),
                        ]
                        for row in morph
                    ),
                )
            )
        if epi_supp:
            specs.append(
                (
                    "_epi_support.csv",
                    ["t", "size", "epi_norm"],
                    (
                        [row.get("t"), row.get("size"), row.get("epi_norm")]
                        for row in epi_supp
                    ),
                )
            )
        for suffix, headers, rows in specs:
            _write_csv(base_path + suffix, headers, rows)
    else:
        data = {
            "glyphogram": glyph,
            "sigma": sigma,
            "morph": morph,
            "epi_support": epi_supp,
        }
        json_path = base_path + ".json"

        def _write_json(f: TextIO) -> None:
            f.write(json_dumps(data))

        safe_write(json_path, _write_json)
@@ -0,0 +1,369 @@
1
+ """Glyph timing utilities and advanced metrics."""
2
+
3
+ from __future__ import annotations
4
+
5
+ from collections import Counter, defaultdict
6
+ from concurrent.futures import ProcessPoolExecutor
7
+ from dataclasses import dataclass
8
+ import math
9
+ from types import ModuleType
10
+ from typing import Any, Callable, Mapping, MutableMapping, MutableSequence, Sequence, TypedDict, cast
11
+
12
+ from ..alias import get_attr
13
+ from ..constants import get_aliases, get_param
14
+ from ..config.constants import GLYPH_GROUPS, GLYPHS_CANONICAL
15
+ from ..glyph_history import append_metric, last_glyph
16
+ from ..types import GraphLike
17
+
18
+ ALIAS_EPI = get_aliases("EPI")
19
+
20
+ LATENT_GLYPH: str = "SHA"
21
+ DEFAULT_EPI_SUPPORT_LIMIT = 0.05
22
+
23
+ try: # pragma: no cover - import guard exercised via tests
24
+ import numpy as _np # type: ignore[import-not-found]
25
+ except Exception: # pragma: no cover - numpy optional dependency
26
+ _np = None
27
+
28
+ np: ModuleType | None = cast(ModuleType | None, _np)
29
+
30
+
31
+ def _has_numpy_support(np_obj: object) -> bool:
32
+ """Return ``True`` when ``np_obj`` exposes the required NumPy API."""
33
+
34
+ return isinstance(np_obj, ModuleType) or (
35
+ np_obj is not None
36
+ and hasattr(np_obj, "fromiter")
37
+ and hasattr(np_obj, "bincount")
38
+ )
39
+
40
+
41
class SigmaTrace(TypedDict):
    """Time-aligned σ(t) trace exported alongside glyphograms.

    All five lists are index-aligned: entry ``i`` of each list describes
    the same timestamp ``t[i]`` (see ``export_metrics``, which fills them
    in lockstep from one row generator).
    """

    # Timestamps for each sample.
    t: list[float]
    # Per-sample x and y components of σ (per field names — presumably the
    # Cartesian form of the vector whose polar form follows; confirm).
    sigma_x: list[float]
    sigma_y: list[float]
    # Per-sample magnitude and angle of σ.
    mag: list[float]
    angle: list[float]
49
+
50
+
51
+ GlyphogramRow = MutableMapping[str, float]
52
+ GlyphTimingTotals = MutableMapping[str, float]
53
+ GlyphTimingByNode = MutableMapping[Any, MutableMapping[str, MutableSequence[float]]]
54
+ GlyphCounts = Mapping[str, int]
55
+ GlyphMetricsHistoryValue = MutableMapping[Any, Any] | MutableSequence[Any]
56
+ GlyphMetricsHistory = MutableMapping[str, GlyphMetricsHistoryValue]
57
+ MetricsListHistory = MutableMapping[str, list[Any]]
58
+
59
+ _GLYPH_TO_INDEX = {glyph: idx for idx, glyph in enumerate(GLYPHS_CANONICAL)}
60
+
61
+
62
+ def _coerce_float(value: Any) -> float:
63
+ """Attempt to coerce ``value`` to ``float`` returning ``0.0`` on failure."""
64
+
65
+ try:
66
+ return float(value)
67
+ except (TypeError, ValueError):
68
+ return 0.0
69
+
70
+
71
@dataclass
class GlyphTiming:
    """Per-node dwell-time tracker for the node's current glyph.

    Stored under a node's ``"_Tg"`` key (see ``_tg_state``) and mutated by
    ``_update_tg_node`` as glyph transitions are observed.
    """

    # Glyph currently being timed; ``None`` before the first observation.
    curr: str | None = None
    # Dwell time accumulated for ``curr`` so far (in ``dt`` units).
    run: float = 0.0
75
+
76
+
77
+ __all__ = [
78
+ "LATENT_GLYPH",
79
+ "GlyphTiming",
80
+ "SigmaTrace",
81
+ "GlyphogramRow",
82
+ "GlyphTimingTotals",
83
+ "GlyphTimingByNode",
84
+ "_tg_state",
85
+ "for_each_glyph",
86
+ "_update_tg_node",
87
+ "_update_tg",
88
+ "_update_glyphogram",
89
+ "_update_latency_index",
90
+ "_update_epi_support",
91
+ "_update_morph_metrics",
92
+ "_compute_advanced_metrics",
93
+ ]
94
+
95
+
96
+ # ---------------------------------------------------------------------------
97
+ # Internal utilities
98
+ # ---------------------------------------------------------------------------
99
+
100
+
101
+ def _count_glyphs_chunk(chunk: Sequence[str]) -> Counter[str]:
102
+ """Count glyph occurrences within a chunk (multiprocessing helper)."""
103
+
104
+ counter: Counter[str] = Counter()
105
+ for glyph in chunk:
106
+ counter[glyph] += 1
107
+ return counter
108
+
109
+
110
+ def _epi_support_chunk(values: Sequence[float], threshold: float) -> tuple[float, int]:
111
+ """Compute EPI support contribution for a chunk."""
112
+
113
+ total = 0.0
114
+ count = 0
115
+ for value in values:
116
+ if value >= threshold:
117
+ total += value
118
+ count += 1
119
+ return total, count
120
+
121
+
122
def _tg_state(nd: MutableMapping[str, Any]) -> GlyphTiming:
    """Expose per-node glyph timing state.

    Returns the ``GlyphTiming`` stored under the node's ``"_Tg"`` key,
    inserting a fresh one when absent.  Note the default instance is
    constructed eagerly even when the key exists; it is a cheap two-field
    dataclass, so this is harmless.
    """

    return nd.setdefault("_Tg", GlyphTiming())
126
+
127
+
128
def for_each_glyph(fn: Callable[[str], None]) -> None:
    """Invoke ``fn`` once per canonical structural operator, in order.

    Purely for side effects; return values of ``fn`` are discarded.
    """

    for glyph in GLYPHS_CANONICAL:
        fn(glyph)
133
+
134
+
135
+ # ---------------------------------------------------------------------------
136
+ # Glyph timing helpers
137
+ # ---------------------------------------------------------------------------
138
+
139
+
140
def _update_tg_node(
    n: Any,
    nd: MutableMapping[str, Any],
    dt: float,
    tg_total: GlyphTimingTotals,
    tg_by_node: GlyphTimingByNode | None,
) -> tuple[str | None, bool]:
    """Track a node's glyph transition and accumulate run time.

    Parameters
    ----------
    n:
        Node identifier, used only as the ``tg_by_node`` key.
    nd:
        Node attribute mapping holding the ``GlyphTiming`` state.
    dt:
        Time elapsed since the previous observation.
    tg_total:
        Mutated in place: a finished run's duration is added under its glyph.
    tg_by_node:
        Optional per-node duration lists, mutated in place when provided.

    Returns
    -------
    tuple[str | None, bool]
        The node's current glyph (``None`` when it has none) and whether
        that glyph is the latency glyph ``LATENT_GLYPH``.
    """

    g = last_glyph(nd)
    if not g:
        return None, False
    st = _tg_state(nd)
    curr = st.curr
    if curr is None:
        # First observation for this node: start timing the glyph.
        st.curr = g
        st.run = dt
    elif g == curr:
        # Same glyph as before: extend the current run.
        st.run += dt
    else:
        # Transition: the finished run for ``curr`` is committed to the
        # totals BEFORE the state is reset for the new glyph ``g``.
        dur = st.run
        tg_total[curr] += dur
        if tg_by_node is not None:
            tg_by_node[n][curr].append(dur)
        st.curr = g
        st.run = dt
    return g, g == LATENT_GLYPH
167
+
168
+
169
def _update_tg(
    G: GraphLike,
    hist: GlyphMetricsHistory,
    dt: float,
    save_by_node: bool,
    n_jobs: int | None = None,
) -> tuple[Counter[str], int, int]:
    """Accumulate glyph dwell times for the entire graph.

    Walks every node once, updating per-glyph dwell-time totals in
    ``hist`` via ``_update_tg_node``, then counts the observed glyphs
    using the fastest available backend.

    Parameters
    ----------
    G:
        Graph whose nodes carry glyph state.
    hist:
        Metrics history; ``"Tg_total"`` (and optionally ``"Tg_by_node"``)
        entries are created as defaultdicts and mutated in place.
    dt:
        Time step forwarded to the per-node update.
    save_by_node:
        When ``True``, per-node run durations are also recorded.
    n_jobs:
        Optional worker count for the multiprocessing fallback; only used
        when NumPy is unavailable.

    Returns
    -------
    tuple[Counter[str], int, int]
        Glyph counts, number of nodes with a glyph, and number of those
        currently in the latent glyph.
    """

    tg_total = cast(GlyphTimingTotals, hist.setdefault("Tg_total", defaultdict(float)))
    # Per-node timing is optional; ``None`` disables it downstream.
    tg_by_node = (
        cast(
            GlyphTimingByNode,
            hist.setdefault(
                "Tg_by_node",
                defaultdict(lambda: defaultdict(list)),
            ),
        )
        if save_by_node
        else None
    )

    n_total = 0
    n_latent = 0
    glyph_sequence: list[str] = []
    for n, nd in G.nodes(data=True):
        g, is_latent = _update_tg_node(n, nd, dt, tg_total, tg_by_node)
        if g is None:
            # Nodes without a glyph are excluded from all counts.
            continue
        n_total += 1
        if is_latent:
            n_latent += 1
        glyph_sequence.append(g)

    counts: Counter[str] = Counter()
    if not glyph_sequence:
        return counts, n_total, n_latent

    # Backend selection: vectorized NumPy when available, then an optional
    # process pool, then a plain Counter update. All three produce the
    # same counts.
    if _has_numpy_support(np):
        glyph_idx = np.fromiter(
            (_GLYPH_TO_INDEX[glyph] for glyph in glyph_sequence),
            dtype=np.int64,
            count=len(glyph_sequence),
        )
        freq = np.bincount(glyph_idx, minlength=len(GLYPHS_CANONICAL))
        counts.update(
            {
                glyph: int(freq[_GLYPH_TO_INDEX[glyph]])
                for glyph in GLYPHS_CANONICAL
                if freq[_GLYPH_TO_INDEX[glyph]]
            }
        )
    elif n_jobs is not None and n_jobs > 1 and len(glyph_sequence) > 1:
        chunk_size = max(1, math.ceil(len(glyph_sequence) / n_jobs))
        futures = []
        with ProcessPoolExecutor(max_workers=n_jobs) as executor:
            for start in range(0, len(glyph_sequence), chunk_size):
                chunk = glyph_sequence[start:start + chunk_size]
                futures.append(executor.submit(_count_glyphs_chunk, chunk))
            for future in futures:
                counts.update(future.result())
    else:
        counts.update(glyph_sequence)

    return counts, n_total, n_latent
234
+
235
+
236
def _update_glyphogram(
    G: GraphLike,
    hist: GlyphMetricsHistory,
    counts: GlyphCounts,
    t: float,
    n_total: int,
) -> None:
    """Record one glyphogram row built from this step's glyph counts.

    When the graph's ``METRICS.normalize_series`` flag is set, counts are
    divided by the active-node total; otherwise raw counts are stored.
    """

    metrics_cfg = get_param(G, "METRICS")
    normalize = bool(metrics_cfg.get("normalize_series", False))
    # Guard against division by zero when no node carried a glyph.
    denominator = max(1, n_total)
    row: GlyphogramRow = {"t": t}
    for glyph in GLYPHS_CANONICAL:
        occurrences = counts.get(glyph, 0)
        row[glyph] = occurrences / denominator if normalize else occurrences
    append_metric(cast(MetricsListHistory, hist), "glyphogram", row)
252
+
253
+
254
def _update_latency_index(
    G: GraphLike,
    hist: GlyphMetricsHistory,
    n_total: int,
    n_latent: int,
    t: float,
) -> None:
    """Record the latency index (latent fraction of nodes) for this step."""

    # ``max(1, ...)`` avoids a zero division on an empty step; the index is
    # then 0 because ``n_latent`` is also 0.
    denominator = max(1, n_total)
    entry = {"t": t, "value": n_latent / denominator}
    append_metric(cast(MetricsListHistory, hist), "latency_index", entry)
269
+
270
+
271
def _update_epi_support(
    G: GraphLike,
    hist: GlyphMetricsHistory,
    t: float,
    threshold: float = DEFAULT_EPI_SUPPORT_LIMIT,
    n_jobs: int | None = None,
) -> None:
    """Measure EPI support and normalized magnitude.

    A node contributes when the absolute value of its EPI attribute is at
    least ``threshold``. The recorded ``size`` is the contributor count and
    ``epi_norm`` the mean contributing magnitude (``0.0`` when none).

    Parameters
    ----------
    G:
        Graph whose node attributes hold EPI values (via ``ALIAS_EPI``).
    hist:
        Metrics history; an ``"EPI_support"`` entry is appended in place.
    t:
        Timestamp stored with the entry.
    threshold:
        Minimum absolute EPI for a node to count as support.
    n_jobs:
        Optional worker count for the multiprocessing fallback; only used
        when NumPy is unavailable.
    """

    node_count = G.number_of_nodes()
    total = 0.0
    count = 0

    # Three equivalent backends, preferred in order: vectorized NumPy,
    # process pool, plain Python loop.
    if _has_numpy_support(np) and node_count:
        epi_values = np.fromiter(
            (
                abs(_coerce_float(get_attr(nd, ALIAS_EPI, 0.0)))
                for _, nd in G.nodes(data=True)
            ),
            dtype=float,
            count=node_count,
        )
        mask = epi_values >= threshold
        count = int(mask.sum())
        if count:
            total = float(epi_values[mask].sum())
    elif n_jobs is not None and n_jobs > 1 and node_count > 1:
        # Values must be materialized so chunks can be pickled to workers.
        values = [
            abs(_coerce_float(get_attr(nd, ALIAS_EPI, 0.0)))
            for _, nd in G.nodes(data=True)
        ]
        chunk_size = max(1, math.ceil(len(values) / n_jobs))
        totals: list[tuple[float, int]] = []
        with ProcessPoolExecutor(max_workers=n_jobs) as executor:
            futures = []
            for start in range(0, len(values), chunk_size):
                chunk = values[start:start + chunk_size]
                futures.append(executor.submit(_epi_support_chunk, chunk, threshold))
            for future in futures:
                totals.append(future.result())
        for part_total, part_count in totals:
            total += part_total
            count += part_count
    else:
        for _, nd in G.nodes(data=True):
            epi_val = abs(_coerce_float(get_attr(nd, ALIAS_EPI, 0.0)))
            if epi_val >= threshold:
                total += epi_val
                count += 1
    epi_norm = (total / count) if count else 0.0
    append_metric(
        cast(MetricsListHistory, hist),
        "EPI_support",
        {"t": t, "size": count, "epi_norm": float(epi_norm)},
    )
326
+
327
+
328
def _update_morph_metrics(
    G: GraphLike,
    hist: GlyphMetricsHistory,
    counts: GlyphCounts,
    t: float,
) -> None:
    """Capture the morphosyntactic distribution of this step's glyphs.

    ``ID``, ``CM`` and ``NE`` are group shares of the overall glyph count;
    ``PP`` is the ratio between the ``PP_num`` and ``PP_den`` group totals
    (``0.0`` when the denominator group is empty).
    """

    def _group_total(keys: Sequence[str]) -> int:
        return sum(counts.get(key, 0) for key in keys)

    # max(1, ...) keeps the shares well-defined when no glyph was counted.
    overall = max(1, sum(counts.values()))
    row = {
        "t": t,
        "ID": _group_total(GLYPH_GROUPS.get("ID", ())) / overall,
        "CM": _group_total(GLYPH_GROUPS.get("CM", ())) / overall,
        "NE": _group_total(GLYPH_GROUPS.get("NE", ())) / overall,
    }
    numerator = _group_total(GLYPH_GROUPS.get("PP_num", ()))
    denominator = _group_total(GLYPH_GROUPS.get("PP_den", ()))
    row["PP"] = numerator / denominator if denominator else 0.0
    append_metric(cast(MetricsListHistory, hist), "morph", row)
351
+
352
+
353
def _compute_advanced_metrics(
    G: GraphLike,
    hist: GlyphMetricsHistory,
    t: float,
    dt: float,
    cfg: Mapping[str, Any],
    threshold: float = DEFAULT_EPI_SUPPORT_LIMIT,
    n_jobs: int | None = None,
) -> None:
    """Compute glyph-timing-derived metrics for one simulation step.

    Orchestrates the five metric updaters: dwell-time accumulation,
    glyphogram row, latency index, EPI support, and morph distribution.
    All of them mutate ``hist`` in place.
    """

    per_node = bool(cfg.get("save_by_node", True))
    counts, active, latent = _update_tg(G, hist, dt, per_node, n_jobs=n_jobs)
    _update_glyphogram(G, hist, counts, t, active)
    _update_latency_index(G, hist, active, latent, t)
    _update_epi_support(G, hist, t, threshold, n_jobs=n_jobs)
    _update_morph_metrics(G, hist, counts, t)