tnfr 4.5.2__py3-none-any.whl → 6.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- tnfr/__init__.py +228 -49
- tnfr/__init__.pyi +40 -0
- tnfr/_compat.py +11 -0
- tnfr/_version.py +7 -0
- tnfr/_version.pyi +7 -0
- tnfr/alias.py +106 -21
- tnfr/alias.pyi +140 -0
- tnfr/cache.py +666 -512
- tnfr/cache.pyi +232 -0
- tnfr/callback_utils.py +2 -9
- tnfr/callback_utils.pyi +105 -0
- tnfr/cli/__init__.py +21 -7
- tnfr/cli/__init__.pyi +47 -0
- tnfr/cli/arguments.py +42 -20
- tnfr/cli/arguments.pyi +33 -0
- tnfr/cli/execution.py +54 -20
- tnfr/cli/execution.pyi +80 -0
- tnfr/cli/utils.py +0 -2
- tnfr/cli/utils.pyi +8 -0
- tnfr/config/__init__.py +12 -0
- tnfr/config/__init__.pyi +8 -0
- tnfr/config/constants.py +104 -0
- tnfr/config/constants.pyi +12 -0
- tnfr/{config.py → config/init.py} +11 -7
- tnfr/config/init.pyi +8 -0
- tnfr/config/operator_names.py +106 -0
- tnfr/config/operator_names.pyi +28 -0
- tnfr/config/presets.py +104 -0
- tnfr/config/presets.pyi +7 -0
- tnfr/constants/__init__.py +78 -24
- tnfr/constants/__init__.pyi +104 -0
- tnfr/constants/core.py +1 -2
- tnfr/constants/core.pyi +17 -0
- tnfr/constants/init.pyi +12 -0
- tnfr/constants/metric.py +4 -12
- tnfr/constants/metric.pyi +19 -0
- tnfr/constants_glyphs.py +9 -91
- tnfr/constants_glyphs.pyi +12 -0
- tnfr/dynamics/__init__.py +112 -634
- tnfr/dynamics/__init__.pyi +83 -0
- tnfr/dynamics/adaptation.py +201 -0
- tnfr/dynamics/aliases.py +22 -0
- tnfr/dynamics/coordination.py +343 -0
- tnfr/dynamics/dnfr.py +1936 -354
- tnfr/dynamics/dnfr.pyi +33 -0
- tnfr/dynamics/integrators.py +369 -75
- tnfr/dynamics/integrators.pyi +35 -0
- tnfr/dynamics/runtime.py +521 -0
- tnfr/dynamics/sampling.py +8 -5
- tnfr/dynamics/sampling.pyi +7 -0
- tnfr/dynamics/selectors.py +680 -0
- tnfr/execution.py +56 -41
- tnfr/execution.pyi +65 -0
- tnfr/flatten.py +7 -7
- tnfr/flatten.pyi +28 -0
- tnfr/gamma.py +54 -37
- tnfr/gamma.pyi +40 -0
- tnfr/glyph_history.py +85 -38
- tnfr/glyph_history.pyi +53 -0
- tnfr/grammar.py +19 -338
- tnfr/grammar.pyi +13 -0
- tnfr/helpers/__init__.py +110 -30
- tnfr/helpers/__init__.pyi +66 -0
- tnfr/helpers/numeric.py +1 -0
- tnfr/helpers/numeric.pyi +12 -0
- tnfr/immutable.py +55 -19
- tnfr/immutable.pyi +37 -0
- tnfr/initialization.py +12 -10
- tnfr/initialization.pyi +73 -0
- tnfr/io.py +99 -34
- tnfr/io.pyi +11 -0
- tnfr/locking.pyi +7 -0
- tnfr/metrics/__init__.pyi +20 -0
- tnfr/metrics/coherence.py +934 -294
- tnfr/metrics/common.py +1 -3
- tnfr/metrics/common.pyi +15 -0
- tnfr/metrics/core.py +192 -34
- tnfr/metrics/core.pyi +13 -0
- tnfr/metrics/diagnosis.py +707 -101
- tnfr/metrics/diagnosis.pyi +89 -0
- tnfr/metrics/export.py +27 -13
- tnfr/metrics/glyph_timing.py +218 -38
- tnfr/metrics/reporting.py +22 -18
- tnfr/metrics/reporting.pyi +12 -0
- tnfr/metrics/sense_index.py +199 -25
- tnfr/metrics/sense_index.pyi +9 -0
- tnfr/metrics/trig.py +53 -18
- tnfr/metrics/trig.pyi +12 -0
- tnfr/metrics/trig_cache.py +3 -7
- tnfr/metrics/trig_cache.pyi +10 -0
- tnfr/node.py +148 -125
- tnfr/node.pyi +161 -0
- tnfr/observers.py +44 -30
- tnfr/observers.pyi +46 -0
- tnfr/ontosim.py +14 -13
- tnfr/ontosim.pyi +33 -0
- tnfr/operators/__init__.py +84 -52
- tnfr/operators/__init__.pyi +31 -0
- tnfr/operators/definitions.py +181 -0
- tnfr/operators/definitions.pyi +92 -0
- tnfr/operators/jitter.py +86 -23
- tnfr/operators/jitter.pyi +11 -0
- tnfr/operators/registry.py +80 -0
- tnfr/operators/registry.pyi +15 -0
- tnfr/operators/remesh.py +141 -57
- tnfr/presets.py +9 -54
- tnfr/presets.pyi +7 -0
- tnfr/py.typed +0 -0
- tnfr/rng.py +259 -73
- tnfr/rng.pyi +14 -0
- tnfr/selector.py +24 -17
- tnfr/selector.pyi +19 -0
- tnfr/sense.py +55 -43
- tnfr/sense.pyi +30 -0
- tnfr/structural.py +44 -267
- tnfr/structural.pyi +46 -0
- tnfr/telemetry/__init__.py +13 -0
- tnfr/telemetry/verbosity.py +37 -0
- tnfr/tokens.py +3 -2
- tnfr/tokens.pyi +41 -0
- tnfr/trace.py +272 -82
- tnfr/trace.pyi +68 -0
- tnfr/types.py +345 -6
- tnfr/types.pyi +145 -0
- tnfr/utils/__init__.py +158 -0
- tnfr/utils/__init__.pyi +133 -0
- tnfr/utils/cache.py +755 -0
- tnfr/utils/cache.pyi +156 -0
- tnfr/{collections_utils.py → utils/data.py} +57 -90
- tnfr/utils/data.pyi +73 -0
- tnfr/utils/graph.py +87 -0
- tnfr/utils/graph.pyi +10 -0
- tnfr/utils/init.py +746 -0
- tnfr/utils/init.pyi +85 -0
- tnfr/{json_utils.py → utils/io.py} +13 -18
- tnfr/utils/io.pyi +10 -0
- tnfr/utils/validators.py +130 -0
- tnfr/utils/validators.pyi +19 -0
- tnfr/validation/__init__.py +25 -0
- tnfr/validation/__init__.pyi +17 -0
- tnfr/validation/compatibility.py +59 -0
- tnfr/validation/compatibility.pyi +8 -0
- tnfr/validation/grammar.py +149 -0
- tnfr/validation/grammar.pyi +11 -0
- tnfr/validation/rules.py +194 -0
- tnfr/validation/rules.pyi +18 -0
- tnfr/validation/syntax.py +151 -0
- tnfr/validation/syntax.pyi +7 -0
- tnfr-6.0.0.dist-info/METADATA +135 -0
- tnfr-6.0.0.dist-info/RECORD +157 -0
- tnfr/graph_utils.py +0 -84
- tnfr/import_utils.py +0 -228
- tnfr/logging_utils.py +0 -116
- tnfr/validators.py +0 -84
- tnfr/value_utils.py +0 -59
- tnfr-4.5.2.dist-info/METADATA +0 -379
- tnfr-4.5.2.dist-info/RECORD +0 -67
- {tnfr-4.5.2.dist-info → tnfr-6.0.0.dist-info}/WHEEL +0 -0
- {tnfr-4.5.2.dist-info → tnfr-6.0.0.dist-info}/entry_points.txt +0 -0
- {tnfr-4.5.2.dist-info → tnfr-6.0.0.dist-info}/licenses/LICENSE.md +0 -0
- {tnfr-4.5.2.dist-info → tnfr-6.0.0.dist-info}/top_level.txt +0 -0
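The layout change behind many of these renames: 4.5.2's top-level helper modules (json_utils, collections_utils, graph_utils, import_utils, logging_utils, validators, value_utils) fold into the new tnfr.utils package, and config.py becomes the tnfr.config package. A minimal migration sketch; the tnfr.utils re-export of json_dumps is confirmed by the export.py hunk below, while the old 4.5.2 import path is an assumption based on the file list above:

# 4.5.2 (module removed in 6.0.0; exact old symbol location assumed):
#   from tnfr.json_utils import json_dumps
# 6.0.0 (tnfr/json_utils.py -> tnfr/utils/io.py, re-exported by tnfr.utils,
# as export.py's new "from ..utils import json_dumps" shows):
from tnfr.utils import json_dumps

print(json_dumps({"t": [0.0, 1.0], "mag": [0.2, 0.4]}))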
@@ -0,0 +1,89 @@
+from typing import Any, Mapping, Sequence
+
+__all__: Any
+
+def __getattr__(name: str) -> Any: ...
+
+ALIAS_DNFR: Any
+ALIAS_EPI: Any
+ALIAS_SI: Any
+ALIAS_VF: Any
+CallbackEvent: Any
+CoherenceMatrixPayload: Any
+Iterable: Any
+ProcessPoolExecutor: Any
+StatisticsError: Any
+TNFRGraph: Any
+TRANSITION: Any
+VF_KEY: Any
+annotations: Any
+append_metric: Any
+callback_manager: Any
+clamp01: Any
+coherence_matrix: Any
+compute_dnfr_accel_max: Any
+compute_theta_trig: Any
+dissonance_events: Any
+ensure_history: Any
+fmean: Any
+ge: Any
+get_aliases: Any
+get_attr: Any
+get_numpy: Any
+get_param: Any
+get_trig_cache: Any
+le: Any
+local_phase_sync: Any
+math: Any
+min_max_range: Any
+normalize_dnfr: Any
+partial: Any
+register_diagnosis_callbacks: Any
+similarity_abs: Any
+
+class RLocalWorkerArgs:
+    chunk: Sequence[Any]
+    coherence_nodes: Sequence[Any]
+    weight_matrix: Any
+    weight_index: Mapping[Any, int]
+    neighbors_map: Mapping[Any, tuple[Any, ...]]
+    cos_map: Mapping[Any, float]
+    sin_map: Mapping[Any, float]
+
+    def __init__(
+        self,
+        chunk: Sequence[Any],
+        coherence_nodes: Sequence[Any],
+        weight_matrix: Any,
+        weight_index: Mapping[Any, int],
+        neighbors_map: Mapping[Any, tuple[Any, ...]],
+        cos_map: Mapping[Any, float],
+        sin_map: Mapping[Any, float],
+    ) -> None: ...
+
+
+class NeighborMeanWorkerArgs:
+    chunk: Sequence[Any]
+    neighbors_map: Mapping[Any, tuple[Any, ...]]
+    epi_map: Mapping[Any, float]
+
+    def __init__(
+        self,
+        chunk: Sequence[Any],
+        neighbors_map: Mapping[Any, tuple[Any, ...]],
+        epi_map: Mapping[Any, float],
+    ) -> None: ...
+
+
+def _rlocal_worker(args: RLocalWorkerArgs) -> list[float]: ...
+
+def _neighbor_mean_worker(args: NeighborMeanWorkerArgs) -> list[float | None]: ...
+
+def _state_from_thresholds(Rloc: float, dnfr_n: float, cfg: Mapping[str, Any]) -> str: ...
+
+def _recommendation(state: str, cfg: Mapping[str, Any]) -> list[Any]: ...
+
+def _get_last_weights(
+    G: TNFRGraph,
+    hist: Mapping[str, Sequence[CoherenceMatrixPayload | None]],
+) -> tuple[CoherenceMatrixPayload | None, CoherenceMatrixPayload | None]: ...
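The stub above pins a typed surface for the diagnosis module while leaving most re-exports as Any; the module-level __getattr__ declaration tells type checkers to accept attributes the stub does not list, and PEP 562 provides the same hook at runtime. A minimal sketch of that runtime pattern, with illustrative names not taken from tnfr's actual module:

from importlib import import_module
from typing import Any

# Lazily resolved attribute -> defining submodule (illustrative table).
_LAZY = {"coherence_matrix": ".coherence", "sigma_rose": ".sense"}

def __getattr__(name: str) -> Any:
    # Called only when normal module attribute lookup fails (PEP 562).
    try:
        module = import_module(_LAZY[name], __package__)
    except KeyError:
        raise AttributeError(name) from None
    return getattr(module, name)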
tnfr/metrics/export.py
CHANGED
@@ -4,17 +4,25 @@ from __future__ import annotations

 import csv
 import math
+from collections.abc import Iterable, Iterator, Sequence
 from itertools import zip_longest, tee
+from typing import Mapping, TextIO

+from ..config.constants import GLYPHS_CANONICAL
 from ..glyph_history import ensure_history
 from ..io import safe_write
-from ..…
+from ..utils import json_dumps
+from ..types import Graph
 from .core import glyphogram_series
-from …
+from .glyph_timing import SigmaTrace


-def _write_csv(path, headers, rows):
-    def _write(f):
+def _write_csv(
+    path: str,
+    headers: Sequence[str],
+    rows: Iterable[Sequence[object]],
+) -> None:
+    def _write(f: TextIO) -> None:
         writer = csv.writer(f)
         writer.writerow(headers)
         for row in rows:
@@ -23,7 +31,9 @@ def _write_csv(path, headers, rows):
     safe_write(path, _write, newline="")


-def _iter_glif_rows(glyph):
+def _iter_glif_rows(
+    glyph: Mapping[str, Sequence[float]],
+) -> Iterator[list[float]]:
     ts = glyph.get("t", [])
     # Precompute columns for each glyph to avoid repeated lookups.
     # ``default_col`` is shared by reference for missing glyphs to prevent
@@ -34,7 +44,7 @@ def _iter_glif_rows(glyph):
         yield [t] + [col[i] for col in cols]


-def export_metrics(G, base_path: str, fmt: str = "csv") -> None:
+def export_metrics(G: Graph, base_path: str, fmt: str = "csv") -> None:
     """Dump glyphogram and σ(t) trace to compact CSV or JSON files."""
     hist = ensure_history(G)
     glyph = glyphogram_series(G)
@@ -53,7 +63,7 @@ def export_metrics(G, base_path: str, fmt: str = "csv") -> None:
             return 0
         return value

-    def _gen_rows():
+    def _gen_rows() -> Iterator[tuple[float, float, float, float, float]]:
         for i, (t, x, y, m, a) in enumerate(rows_raw):
             yield (
                 i if t is None else t,
@@ -65,7 +75,7 @@ def export_metrics(G, base_path: str, fmt: str = "csv") -> None:

     rows_csv, rows_sigma = tee(_gen_rows())

-    sigma: …
+    sigma: SigmaTrace = {
         "t": [],
         "sigma_x": [],
         "sigma_y": [],
@@ -78,13 +88,13 @@ def export_metrics(G, base_path: str, fmt: str = "csv") -> None:
         sigma["sigma_y"].append(y)
         sigma["mag"].append(m)
         sigma["angle"].append(a)
-    morph = hist.get("morph", [])
-    epi_supp = hist.get("EPI_support", [])
+    morph: Sequence[Mapping[str, float]] = hist.get("morph", [])
+    epi_supp: Sequence[Mapping[str, float]] = hist.get("EPI_support", [])
    fmt = fmt.lower()
    if fmt not in {"csv", "json"}:
-        raise ValueError(f"…
+        raise ValueError(f"Unsupported export format: {fmt}")
    if fmt == "csv":
-        specs = [
+        specs: list[tuple[str, Sequence[str], Iterable[Sequence[object]]]] = [
             (
                 "_glyphogram.csv",
                 ["t", *GLYPHS_CANONICAL],
@@ -134,4 +144,8 @@ def export_metrics(G, base_path: str, fmt: str = "csv") -> None:
         "epi_support": epi_supp,
     }
     json_path = base_path + ".json"
-…
+
+    def _write_json(f: TextIO) -> None:
+        f.write(json_dumps(data))
+
+    safe_write(json_path, _write_json)
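Call-site usage of the retyped exporter is unchanged; a sketch, assuming a networkx graph whose history has been populated by the metrics callbacks (the simulation itself is not shown):

import networkx as nx
from tnfr.metrics.export import export_metrics

G = nx.Graph()
# ... run a TNFR simulation here so the graph history carries
#     glyphogram and sigma series ...

export_metrics(G, "out/run01", fmt="csv")   # writes out/run01_glyphogram.csv etc.
export_metrics(G, "out/run01", fmt="json")  # writes out/run01.json
export_metrics(G, "out/run01", fmt="xml")   # ValueError: Unsupported export format: xml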
tnfr/metrics/glyph_timing.py
CHANGED
@@ -3,20 +3,70 @@
 from __future__ import annotations

 from collections import Counter, defaultdict
+from concurrent.futures import ProcessPoolExecutor
 from dataclasses import dataclass
-…
+import math
+from types import ModuleType
+from typing import Any, Callable, Mapping, MutableMapping, MutableSequence, Sequence, TypedDict, cast

 from ..alias import get_attr
 from ..constants import get_aliases, get_param
-from ..…
+from ..config.constants import GLYPH_GROUPS, GLYPHS_CANONICAL
 from ..glyph_history import append_metric, last_glyph
-from ..types import …
+from ..types import GraphLike

 ALIAS_EPI = get_aliases("EPI")

-LATENT_GLYPH = …
+LATENT_GLYPH: str = "SHA"
 DEFAULT_EPI_SUPPORT_LIMIT = 0.05

+try:  # pragma: no cover - import guard exercised via tests
+    import numpy as _np  # type: ignore[import-not-found]
+except Exception:  # pragma: no cover - numpy optional dependency
+    _np = None
+
+np: ModuleType | None = cast(ModuleType | None, _np)
+
+
+def _has_numpy_support(np_obj: object) -> bool:
+    """Return ``True`` when ``np_obj`` exposes the required NumPy API."""
+
+    return isinstance(np_obj, ModuleType) or (
+        np_obj is not None
+        and hasattr(np_obj, "fromiter")
+        and hasattr(np_obj, "bincount")
+    )
+
+
+class SigmaTrace(TypedDict):
+    """Time-aligned σ(t) trace exported alongside glyphograms."""
+
+    t: list[float]
+    sigma_x: list[float]
+    sigma_y: list[float]
+    mag: list[float]
+    angle: list[float]
+
+
+GlyphogramRow = MutableMapping[str, float]
+GlyphTimingTotals = MutableMapping[str, float]
+GlyphTimingByNode = MutableMapping[Any, MutableMapping[str, MutableSequence[float]]]
+GlyphCounts = Mapping[str, int]
+GlyphMetricsHistoryValue = MutableMapping[Any, Any] | MutableSequence[Any]
+GlyphMetricsHistory = MutableMapping[str, GlyphMetricsHistoryValue]
+MetricsListHistory = MutableMapping[str, list[Any]]
+
+_GLYPH_TO_INDEX = {glyph: idx for idx, glyph in enumerate(GLYPHS_CANONICAL)}
+
+
+def _coerce_float(value: Any) -> float:
+    """Attempt to coerce ``value`` to ``float`` returning ``0.0`` on failure."""
+
+    try:
+        return float(value)
+    except (TypeError, ValueError):
+        return 0.0
+

 @dataclass
 class GlyphTiming:
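The new SigmaTrace TypedDict is what export.py above binds with "sigma: SigmaTrace = {...}": at runtime it is a plain dict, while static checkers validate the keys and value types. A small sketch:

from tnfr.metrics.glyph_timing import SigmaTrace

trace: SigmaTrace = {"t": [], "sigma_x": [], "sigma_y": [], "mag": [], "angle": []}
trace["t"].append(0.0)
trace["mag"].append(0.37)
# trace["magnitude"] = []   # rejected by mypy: "magnitude" is not a SigmaTrace key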
@@ -27,6 +77,10 @@ class GlyphTiming:
 __all__ = [
     "LATENT_GLYPH",
     "GlyphTiming",
+    "SigmaTrace",
+    "GlyphogramRow",
+    "GlyphTimingTotals",
+    "GlyphTimingByNode",
     "_tg_state",
     "for_each_glyph",
     "_update_tg_node",
@@ -44,13 +98,34 @@ __all__ = [
 # ---------------------------------------------------------------------------


-def …
+def _count_glyphs_chunk(chunk: Sequence[str]) -> Counter[str]:
+    """Count glyph occurrences within a chunk (multiprocessing helper)."""
+
+    counter: Counter[str] = Counter()
+    for glyph in chunk:
+        counter[glyph] += 1
+    return counter
+
+
+def _epi_support_chunk(values: Sequence[float], threshold: float) -> tuple[float, int]:
+    """Compute EPI support contribution for a chunk."""
+
+    total = 0.0
+    count = 0
+    for value in values:
+        if value >= threshold:
+            total += value
+            count += 1
+    return total, count
+
+
+def _tg_state(nd: MutableMapping[str, Any]) -> GlyphTiming:
     """Expose per-node glyph timing state."""

     return nd.setdefault("_Tg", GlyphTiming())


-def for_each_glyph(fn: Callable[[str], Any]) -> None:
+def for_each_glyph(fn: Callable[[str], None]) -> None:
     """Apply ``fn`` to each canonical structural operator."""

     for g in GLYPHS_CANONICAL:
@@ -62,7 +137,13 @@ def for_each_glyph(fn: Callable[[str], Any]) -> None:
 # ---------------------------------------------------------------------------


-def _update_tg_node(n, nd, dt, tg_total, tg_by_node):
+def _update_tg_node(
+    n: Any,
+    nd: MutableMapping[str, Any],
+    dt: float,
+    tg_total: GlyphTimingTotals,
+    tg_by_node: GlyphTimingByNode | None,
+) -> tuple[str | None, bool]:
     """Track a node's glyph transition and accumulate run time."""

     g = last_glyph(nd)
@@ -85,19 +166,31 @@ def _update_tg_node(n, nd, dt, tg_total, tg_by_node):
     return g, g == LATENT_GLYPH


-def _update_tg(G, hist, dt, save_by_node: bool):
+def _update_tg(
+    G: GraphLike,
+    hist: GlyphMetricsHistory,
+    dt: float,
+    save_by_node: bool,
+    n_jobs: int | None = None,
+) -> tuple[Counter[str], int, int]:
     """Accumulate glyph dwell times for the entire graph."""

-…
-    tg_total = hist.setdefault("Tg_total", defaultdict(float))
+    tg_total = cast(GlyphTimingTotals, hist.setdefault("Tg_total", defaultdict(float)))
     tg_by_node = (
-…
+        cast(
+            GlyphTimingByNode,
+            hist.setdefault(
+                "Tg_by_node",
+                defaultdict(lambda: defaultdict(list)),
+            ),
+        )
         if save_by_node
         else None
     )

     n_total = 0
     n_latent = 0
+    glyph_sequence: list[str] = []
     for n, nd in G.nodes(data=True):
         g, is_latent = _update_tg_node(n, nd, dt, tg_total, tg_by_node)
         if g is None:
@@ -105,56 +198,142 @@ def _update_tg(G, hist, dt, save_by_node: bool):
         n_total += 1
         if is_latent:
             n_latent += 1
-…
+        glyph_sequence.append(g)
+
+    counts: Counter[str] = Counter()
+    if not glyph_sequence:
+        return counts, n_total, n_latent
+
+    if _has_numpy_support(np):
+        glyph_idx = np.fromiter(
+            (_GLYPH_TO_INDEX[glyph] for glyph in glyph_sequence),
+            dtype=np.int64,
+            count=len(glyph_sequence),
+        )
+        freq = np.bincount(glyph_idx, minlength=len(GLYPHS_CANONICAL))
+        counts.update(
+            {
+                glyph: int(freq[_GLYPH_TO_INDEX[glyph]])
+                for glyph in GLYPHS_CANONICAL
+                if freq[_GLYPH_TO_INDEX[glyph]]
+            }
+        )
+    elif n_jobs is not None and n_jobs > 1 and len(glyph_sequence) > 1:
+        chunk_size = max(1, math.ceil(len(glyph_sequence) / n_jobs))
+        futures = []
+        with ProcessPoolExecutor(max_workers=n_jobs) as executor:
+            for start in range(0, len(glyph_sequence), chunk_size):
+                chunk = glyph_sequence[start:start + chunk_size]
+                futures.append(executor.submit(_count_glyphs_chunk, chunk))
+            for future in futures:
+                counts.update(future.result())
+    else:
+        counts.update(glyph_sequence)
+
     return counts, n_total, n_latent


-def _update_glyphogram(…
+def _update_glyphogram(
+    G: GraphLike,
+    hist: GlyphMetricsHistory,
+    counts: GlyphCounts,
+    t: float,
+    n_total: int,
+) -> None:
     """Record glyphogram row from glyph counts."""

     normalize_series = bool(get_param(G, "METRICS").get("normalize_series", False))
-    row = {"t": t}
+    row: GlyphogramRow = {"t": t}
     total = max(1, n_total)
     for g in GLYPHS_CANONICAL:
         c = counts.get(g, 0)
         row[g] = (c / total) if normalize_series else c
-    append_metric(hist, "glyphogram", row)
+    append_metric(cast(MetricsListHistory, hist), "glyphogram", row)


-def _update_latency_index(…
+def _update_latency_index(
+    G: GraphLike,
+    hist: GlyphMetricsHistory,
+    n_total: int,
+    n_latent: int,
+    t: float,
+) -> None:
     """Record latency index for the current step."""

     li = n_latent / max(1, n_total)
-    append_metric(…
+    append_metric(
+        cast(MetricsListHistory, hist),
+        "latency_index",
+        {"t": t, "value": li},
+    )


 def _update_epi_support(
-    G,
-    hist,
-    t,
+    G: GraphLike,
+    hist: GlyphMetricsHistory,
+    t: float,
     threshold: float = DEFAULT_EPI_SUPPORT_LIMIT,
-…
+    n_jobs: int | None = None,
+) -> None:
     """Measure EPI support and normalized magnitude."""

+    node_count = G.number_of_nodes()
     total = 0.0
     count = 0
-…
-…
-…
-…
-…
+
+    if _has_numpy_support(np) and node_count:
+        epi_values = np.fromiter(
+            (
+                abs(_coerce_float(get_attr(nd, ALIAS_EPI, 0.0)))
+                for _, nd in G.nodes(data=True)
+            ),
+            dtype=float,
+            count=node_count,
+        )
+        mask = epi_values >= threshold
+        count = int(mask.sum())
+        if count:
+            total = float(epi_values[mask].sum())
+    elif n_jobs is not None and n_jobs > 1 and node_count > 1:
+        values = [
+            abs(_coerce_float(get_attr(nd, ALIAS_EPI, 0.0)))
+            for _, nd in G.nodes(data=True)
+        ]
+        chunk_size = max(1, math.ceil(len(values) / n_jobs))
+        totals: list[tuple[float, int]] = []
+        with ProcessPoolExecutor(max_workers=n_jobs) as executor:
+            futures = []
+            for start in range(0, len(values), chunk_size):
+                chunk = values[start:start + chunk_size]
+                futures.append(executor.submit(_epi_support_chunk, chunk, threshold))
+            for future in futures:
+                totals.append(future.result())
+        for part_total, part_count in totals:
+            total += part_total
+            count += part_count
+    else:
+        for _, nd in G.nodes(data=True):
+            epi_val = abs(_coerce_float(get_attr(nd, ALIAS_EPI, 0.0)))
+            if epi_val >= threshold:
+                total += epi_val
+                count += 1
     epi_norm = (total / count) if count else 0.0
     append_metric(
-        hist,
+        cast(MetricsListHistory, hist),
         "EPI_support",
         {"t": t, "size": count, "epi_norm": float(epi_norm)},
     )


-def _update_morph_metrics(…
+def _update_morph_metrics(
+    G: GraphLike,
+    hist: GlyphMetricsHistory,
+    counts: GlyphCounts,
+    t: float,
+) -> None:
     """Capture morphosyntactic distribution of glyphs."""

-    def get_count(keys):
+    def get_count(keys: Sequence[str]) -> int:
         return sum(counts.get(k, 0) for k in keys)

     total = max(1, sum(counts.values()))
@@ -165,25 +344,26 @@ def _update_morph_metrics(G, hist, counts, t):
     den = get_count(GLYPH_GROUPS.get("PP_den", ()))
     pp_val = 0.0 if den == 0 else num / den
     append_metric(
-        hist,
+        cast(MetricsListHistory, hist),
         "morph",
         {"t": t, "ID": id_val, "CM": cm_val, "NE": ne_val, "PP": pp_val},
     )


 def _compute_advanced_metrics(
-    G,
-    hist,
-    t,
-    dt,
-    cfg,
+    G: GraphLike,
+    hist: GlyphMetricsHistory,
+    t: float,
+    dt: float,
+    cfg: Mapping[str, Any],
     threshold: float = DEFAULT_EPI_SUPPORT_LIMIT,
-…
+    n_jobs: int | None = None,
+) -> None:
     """Compute glyph timing derived metrics."""

     save_by_node = bool(cfg.get("save_by_node", True))
-    counts, n_total, n_latent = _update_tg(G, hist, dt, save_by_node)
+    counts, n_total, n_latent = _update_tg(G, hist, dt, save_by_node, n_jobs=n_jobs)
     _update_glyphogram(G, hist, counts, t, n_total)
     _update_latency_index(G, hist, n_total, n_latent, t)
-    _update_epi_support(G, hist, t, threshold)
+    _update_epi_support(G, hist, t, threshold, n_jobs=n_jobs)
     _update_morph_metrics(G, hist, counts, t)
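The counting strategy added to _update_tg has three tiers: vectorised np.bincount when NumPy is importable, a ProcessPoolExecutor fan-out when n_jobs > 1 and NumPy is absent, and a plain Counter otherwise. A self-contained sketch of the same cascade (the glyph alphabet is a stand-in for GLYPHS_CANONICAL):

from collections import Counter
from concurrent.futures import ProcessPoolExecutor
import math

GLYPHS = ("AL", "EN", "SHA")  # stand-in alphabet; tnfr uses GLYPHS_CANONICAL
_IDX = {g: i for i, g in enumerate(GLYPHS)}

def count_glyphs(seq: list[str], n_jobs: int | None = None) -> Counter[str]:
    counts: Counter[str] = Counter()
    if not seq:
        return counts
    try:
        import numpy as np
    except ImportError:  # NumPy is an optional dependency
        np = None
    if np is not None:
        # Tier 1: integer-encode the glyphs and histogram them in C.
        idx = np.fromiter((_IDX[g] for g in seq), dtype=np.int64, count=len(seq))
        freq = np.bincount(idx, minlength=len(GLYPHS))
        counts.update({g: int(freq[_IDX[g]]) for g in GLYPHS if freq[_IDX[g]]})
    elif n_jobs and n_jobs > 1 and len(seq) > 1:
        # Tier 2: split into n_jobs chunks and count in worker processes.
        size = max(1, math.ceil(len(seq) / n_jobs))
        chunks = (seq[i:i + size] for i in range(0, len(seq), size))
        with ProcessPoolExecutor(max_workers=n_jobs) as ex:
            for part in ex.map(Counter, chunks):
                counts.update(part)
    else:
        # Tier 3: sequential fallback.
        counts.update(seq)
    return counts

if __name__ == "__main__":  # guard required for the process-pool path
    print(count_glyphs(["AL", "SHA", "SHA", "EN"]))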
tnfr/metrics/reporting.py
CHANGED
@@ -2,12 +2,14 @@

 from __future__ import annotations

+from collections.abc import Sequence
 from typing import Any

 from heapq import nlargest
 from statistics import mean, fmean, StatisticsError

 from ..glyph_history import ensure_history
+from ..types import NodeId, TNFRGraph
 from ..sense import sigma_rose
 from .glyph_timing import for_each_glyph

@@ -26,7 +28,7 @@ __all__ = [
 # ---------------------------------------------------------------------------


-def Tg_global(G, normalize: bool = True) -> dict[str, float]:
+def Tg_global(G: TNFRGraph, normalize: bool = True) -> dict[str, float]:
     """Total glyph dwell time per class."""

     hist = ensure_history(G)
@@ -34,7 +36,7 @@ def Tg_global(G, normalize: bool = True) -> dict[str, float]:
     total = sum(tg_total.values()) or 1.0
     out: dict[str, float] = {}

-    def add(g):
+    def add(g: str) -> None:
         val = float(tg_total.get(g, 0.0))
         out[g] = val / total if normalize else val
@@ -42,30 +44,32 @@ def Tg_global(G, normalize: bool = True) -> dict[str, float]:
     return out


-def Tg_by_node(…
+def Tg_by_node(
+    G: TNFRGraph, n: NodeId, normalize: bool = False
+) -> dict[str, float] | dict[str, list[float]]:
     """Per-node glyph dwell summary."""

     hist = ensure_history(G)
     rec = hist.get("Tg_by_node", {}).get(n, {})
     if not normalize:
-…
+        runs_out: dict[str, list[float]] = {}

-        def copy_runs(g):
-…
+        def copy_runs(g: str) -> None:
+            runs_out[g] = list(rec.get(g, []))

         for_each_glyph(copy_runs)
-        return …
-…
+        return runs_out
+    mean_out: dict[str, float] = {}

-    def add(g):
+    def add(g: str) -> None:
         runs = rec.get(g, [])
-…
+        mean_out[g] = float(mean(runs)) if runs else 0.0

     for_each_glyph(add)
-    return …
+    return mean_out


-def latency_series(G) -> dict[str, list[float]]:
+def latency_series(G: TNFRGraph) -> dict[str, list[float]]:
     hist = ensure_history(G)
     xs = hist.get("latency_index", [])
     return {
@@ -74,21 +78,21 @@ def latency_series(G) -> dict[str, list[float]]:
     }


-def glyphogram_series(G) -> dict[str, list[float]]:
+def glyphogram_series(G: TNFRGraph) -> dict[str, list[float]]:
     hist = ensure_history(G)
     xs = hist.get("glyphogram", [])
     if not xs:
         return {"t": []}
     out: dict[str, list[float]] = {"t": [float(x.get("t", i)) for i, x in enumerate(xs)]}

-    def add(g):
+    def add(g: str) -> None:
         out[g] = [float(x.get(g, 0.0)) for x in xs]

     for_each_glyph(add)
     return out


-def glyph_top(G, k: int = 3) -> list[tuple[str, float]]:
+def glyph_top(G: TNFRGraph, k: int = 3) -> list[tuple[str, float]]:
     """Top-k structural operators by ``Tg_global`` fraction."""

     k = int(k)
@@ -99,8 +103,8 @@ def glyph_top(G, k: int = 3) -> list[tuple[str, float]]:


 def build_metrics_summary(
-    G, *, series_limit: int | None = None
-) -> tuple[dict[str, …
+    G: TNFRGraph, *, series_limit: int | None = None
+) -> tuple[dict[str, float | dict[str, float] | dict[str, list[float]] | dict[str, int]], bool]:
     """Collect a compact metrics summary for CLI reporting.

     Parameters
@@ -131,7 +135,7 @@ def build_metrics_summary(
     if limit <= 0:
         limit = None

-    def _trim(values: …
+    def _trim(values: Sequence[Any]) -> list[Any]:
         seq = list(values)
         if limit is None:
             return seq
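With the TNFRGraph/NodeId annotations in place, the reporting helpers read as below; a sketch that assumes G comes from a prior run whose metrics callbacks filled the graph history, and uses node id 0 purely for illustration:

from tnfr.metrics.reporting import Tg_global, Tg_by_node, glyph_top, latency_series

# G: a graph produced by a prior tnfr run (setup not shown)
fractions = Tg_global(G)                    # dict[str, float], dwell fraction per glyph
runs = Tg_by_node(G, 0, normalize=False)    # dict[str, list[float]]: raw dwell runs
means = Tg_by_node(G, 0, normalize=True)    # dict[str, float]: mean dwell per glyph
top3 = glyph_top(G, k=3)                    # [(glyph, fraction), ...], top-3 operators
series = latency_series(G)                  # dict[str, list[float]] time series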