tnfr-4.5.2-py3-none-any.whl → tnfr-7.0.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of tnfr has been flagged for review.
- tnfr/__init__.py +275 -51
- tnfr/__init__.pyi +33 -0
- tnfr/_compat.py +10 -0
- tnfr/_generated_version.py +34 -0
- tnfr/_version.py +49 -0
- tnfr/_version.pyi +7 -0
- tnfr/alias.py +117 -31
- tnfr/alias.pyi +108 -0
- tnfr/cache.py +6 -572
- tnfr/cache.pyi +16 -0
- tnfr/callback_utils.py +16 -38
- tnfr/callback_utils.pyi +79 -0
- tnfr/cli/__init__.py +34 -14
- tnfr/cli/__init__.pyi +26 -0
- tnfr/cli/arguments.py +211 -28
- tnfr/cli/arguments.pyi +27 -0
- tnfr/cli/execution.py +470 -50
- tnfr/cli/execution.pyi +70 -0
- tnfr/cli/utils.py +18 -3
- tnfr/cli/utils.pyi +8 -0
- tnfr/config/__init__.py +13 -0
- tnfr/config/__init__.pyi +10 -0
- tnfr/{constants_glyphs.py → config/constants.py} +26 -20
- tnfr/config/constants.pyi +12 -0
- tnfr/config/feature_flags.py +83 -0
- tnfr/{config.py → config/init.py} +11 -7
- tnfr/config/init.pyi +8 -0
- tnfr/config/operator_names.py +93 -0
- tnfr/config/operator_names.pyi +28 -0
- tnfr/config/presets.py +84 -0
- tnfr/config/presets.pyi +7 -0
- tnfr/constants/__init__.py +80 -29
- tnfr/constants/__init__.pyi +92 -0
- tnfr/constants/aliases.py +31 -0
- tnfr/constants/core.py +4 -4
- tnfr/constants/core.pyi +17 -0
- tnfr/constants/init.py +1 -1
- tnfr/constants/init.pyi +12 -0
- tnfr/constants/metric.py +7 -15
- tnfr/constants/metric.pyi +19 -0
- tnfr/dynamics/__init__.py +165 -633
- tnfr/dynamics/__init__.pyi +82 -0
- tnfr/dynamics/adaptation.py +267 -0
- tnfr/dynamics/aliases.py +23 -0
- tnfr/dynamics/coordination.py +385 -0
- tnfr/dynamics/dnfr.py +2283 -400
- tnfr/dynamics/dnfr.pyi +24 -0
- tnfr/dynamics/integrators.py +406 -98
- tnfr/dynamics/integrators.pyi +34 -0
- tnfr/dynamics/runtime.py +881 -0
- tnfr/dynamics/sampling.py +10 -5
- tnfr/dynamics/sampling.pyi +7 -0
- tnfr/dynamics/selectors.py +719 -0
- tnfr/execution.py +70 -48
- tnfr/execution.pyi +45 -0
- tnfr/flatten.py +13 -9
- tnfr/flatten.pyi +21 -0
- tnfr/gamma.py +66 -53
- tnfr/gamma.pyi +34 -0
- tnfr/glyph_history.py +110 -52
- tnfr/glyph_history.pyi +35 -0
- tnfr/glyph_runtime.py +16 -0
- tnfr/glyph_runtime.pyi +9 -0
- tnfr/immutable.py +69 -28
- tnfr/immutable.pyi +34 -0
- tnfr/initialization.py +16 -16
- tnfr/initialization.pyi +65 -0
- tnfr/io.py +6 -240
- tnfr/io.pyi +16 -0
- tnfr/locking.pyi +7 -0
- tnfr/mathematics/__init__.py +81 -0
- tnfr/mathematics/backend.py +426 -0
- tnfr/mathematics/dynamics.py +398 -0
- tnfr/mathematics/epi.py +254 -0
- tnfr/mathematics/generators.py +222 -0
- tnfr/mathematics/metrics.py +119 -0
- tnfr/mathematics/operators.py +233 -0
- tnfr/mathematics/operators_factory.py +71 -0
- tnfr/mathematics/projection.py +78 -0
- tnfr/mathematics/runtime.py +173 -0
- tnfr/mathematics/spaces.py +247 -0
- tnfr/mathematics/transforms.py +292 -0
- tnfr/metrics/__init__.py +10 -10
- tnfr/metrics/__init__.pyi +20 -0
- tnfr/metrics/coherence.py +993 -324
- tnfr/metrics/common.py +23 -16
- tnfr/metrics/common.pyi +46 -0
- tnfr/metrics/core.py +251 -35
- tnfr/metrics/core.pyi +13 -0
- tnfr/metrics/diagnosis.py +708 -111
- tnfr/metrics/diagnosis.pyi +85 -0
- tnfr/metrics/export.py +27 -15
- tnfr/metrics/glyph_timing.py +232 -42
- tnfr/metrics/reporting.py +33 -22
- tnfr/metrics/reporting.pyi +12 -0
- tnfr/metrics/sense_index.py +987 -43
- tnfr/metrics/sense_index.pyi +9 -0
- tnfr/metrics/trig.py +214 -23
- tnfr/metrics/trig.pyi +13 -0
- tnfr/metrics/trig_cache.py +115 -22
- tnfr/metrics/trig_cache.pyi +10 -0
- tnfr/node.py +542 -136
- tnfr/node.pyi +178 -0
- tnfr/observers.py +152 -35
- tnfr/observers.pyi +31 -0
- tnfr/ontosim.py +23 -19
- tnfr/ontosim.pyi +28 -0
- tnfr/operators/__init__.py +601 -82
- tnfr/operators/__init__.pyi +45 -0
- tnfr/operators/definitions.py +513 -0
- tnfr/operators/definitions.pyi +78 -0
- tnfr/operators/grammar.py +760 -0
- tnfr/operators/jitter.py +107 -38
- tnfr/operators/jitter.pyi +11 -0
- tnfr/operators/registry.py +75 -0
- tnfr/operators/registry.pyi +13 -0
- tnfr/operators/remesh.py +149 -88
- tnfr/py.typed +0 -0
- tnfr/rng.py +46 -143
- tnfr/rng.pyi +14 -0
- tnfr/schemas/__init__.py +8 -0
- tnfr/schemas/grammar.json +94 -0
- tnfr/selector.py +25 -19
- tnfr/selector.pyi +19 -0
- tnfr/sense.py +72 -62
- tnfr/sense.pyi +23 -0
- tnfr/structural.py +522 -262
- tnfr/structural.pyi +69 -0
- tnfr/telemetry/__init__.py +35 -0
- tnfr/telemetry/cache_metrics.py +226 -0
- tnfr/telemetry/nu_f.py +423 -0
- tnfr/telemetry/nu_f.pyi +123 -0
- tnfr/telemetry/verbosity.py +37 -0
- tnfr/tokens.py +1 -3
- tnfr/tokens.pyi +36 -0
- tnfr/trace.py +270 -113
- tnfr/trace.pyi +40 -0
- tnfr/types.py +574 -6
- tnfr/types.pyi +331 -0
- tnfr/units.py +69 -0
- tnfr/units.pyi +16 -0
- tnfr/utils/__init__.py +217 -0
- tnfr/utils/__init__.pyi +202 -0
- tnfr/utils/cache.py +2395 -0
- tnfr/utils/cache.pyi +468 -0
- tnfr/utils/chunks.py +104 -0
- tnfr/utils/chunks.pyi +21 -0
- tnfr/{collections_utils.py → utils/data.py} +147 -90
- tnfr/utils/data.pyi +64 -0
- tnfr/utils/graph.py +85 -0
- tnfr/utils/graph.pyi +10 -0
- tnfr/utils/init.py +770 -0
- tnfr/utils/init.pyi +78 -0
- tnfr/utils/io.py +456 -0
- tnfr/{helpers → utils}/numeric.py +51 -24
- tnfr/utils/numeric.pyi +21 -0
- tnfr/validation/__init__.py +113 -0
- tnfr/validation/__init__.pyi +77 -0
- tnfr/validation/compatibility.py +95 -0
- tnfr/validation/compatibility.pyi +6 -0
- tnfr/validation/grammar.py +71 -0
- tnfr/validation/grammar.pyi +40 -0
- tnfr/validation/graph.py +138 -0
- tnfr/validation/graph.pyi +17 -0
- tnfr/validation/rules.py +281 -0
- tnfr/validation/rules.pyi +55 -0
- tnfr/validation/runtime.py +263 -0
- tnfr/validation/runtime.pyi +31 -0
- tnfr/validation/soft_filters.py +170 -0
- tnfr/validation/soft_filters.pyi +37 -0
- tnfr/validation/spectral.py +159 -0
- tnfr/validation/spectral.pyi +46 -0
- tnfr/validation/syntax.py +40 -0
- tnfr/validation/syntax.pyi +10 -0
- tnfr/validation/window.py +39 -0
- tnfr/validation/window.pyi +1 -0
- tnfr/viz/__init__.py +9 -0
- tnfr/viz/matplotlib.py +246 -0
- tnfr-7.0.0.dist-info/METADATA +179 -0
- tnfr-7.0.0.dist-info/RECORD +185 -0
- {tnfr-4.5.2.dist-info → tnfr-7.0.0.dist-info}/licenses/LICENSE.md +1 -1
- tnfr/grammar.py +0 -344
- tnfr/graph_utils.py +0 -84
- tnfr/helpers/__init__.py +0 -71
- tnfr/import_utils.py +0 -228
- tnfr/json_utils.py +0 -162
- tnfr/logging_utils.py +0 -116
- tnfr/presets.py +0 -60
- tnfr/validators.py +0 -84
- tnfr/value_utils.py +0 -59
- tnfr-4.5.2.dist-info/METADATA +0 -379
- tnfr-4.5.2.dist-info/RECORD +0 -67
- {tnfr-4.5.2.dist-info → tnfr-7.0.0.dist-info}/WHEEL +0 -0
- {tnfr-4.5.2.dist-info → tnfr-7.0.0.dist-info}/entry_points.txt +0 -0
- {tnfr-4.5.2.dist-info → tnfr-7.0.0.dist-info}/top_level.txt +0 -0
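Several 4.5.2 modules were deleted or folded into the new utils/ and config/ packages (json_utils, collections_utils, constants_glyphs, the helpers/ tree). A migration sketch for downstream imports follows; the new paths are inferred from the renames above and the import rewrites visible in the diffs below, so verify the exact re-exports against an installed tnfr 7.0.0.

# Sketch: updating downstream imports across the 4.5.2 -> 7.0.0 restructure.
# New locations inferred from this diff; confirm against the installed package.

# tnfr 4.5.2
#   from tnfr.json_utils import json_dumps
#   from tnfr.constants_glyphs import GLYPHS_CANONICAL

# tnfr 7.0.0
from tnfr.utils import json_dumps, safe_write        # json_utils/io helpers now re-exported from tnfr.utils
from tnfr.config.constants import GLYPHS_CANONICAL   # constants_glyphs.py -> config/constants.py

print(json_dumps({"glyphs": list(GLYPHS_CANONICAL)}))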
tnfr/metrics/diagnosis.pyi
@@ -0,0 +1,85 @@
+from typing import Any, Mapping, Sequence
+
+__all__: Any
+
+def __getattr__(name: str) -> Any: ...
+
+ALIAS_DNFR: Any
+ALIAS_EPI: Any
+ALIAS_SI: Any
+ALIAS_VF: Any
+CallbackEvent: Any
+CoherenceMatrixPayload: Any
+Iterable: Any
+ProcessPoolExecutor: Any
+StatisticsError: Any
+TNFRGraph: Any
+TRANSITION: Any
+VF_KEY: Any
+annotations: Any
+append_metric: Any
+callback_manager: Any
+clamp01: Any
+coherence_matrix: Any
+compute_dnfr_accel_max: Any
+compute_theta_trig: Any
+dissonance_events: Any
+ensure_history: Any
+fmean: Any
+ge: Any
+get_aliases: Any
+get_attr: Any
+get_numpy: Any
+get_param: Any
+get_trig_cache: Any
+le: Any
+local_phase_sync: Any
+math: Any
+min_max_range: Any
+normalize_dnfr: Any
+partial: Any
+register_diagnosis_callbacks: Any
+similarity_abs: Any
+
+class RLocalWorkerArgs:
+    chunk: Sequence[Any]
+    coherence_nodes: Sequence[Any]
+    weight_matrix: Any
+    weight_index: Mapping[Any, int]
+    neighbors_map: Mapping[Any, tuple[Any, ...]]
+    cos_map: Mapping[Any, float]
+    sin_map: Mapping[Any, float]
+
+    def __init__(
+        self,
+        chunk: Sequence[Any],
+        coherence_nodes: Sequence[Any],
+        weight_matrix: Any,
+        weight_index: Mapping[Any, int],
+        neighbors_map: Mapping[Any, tuple[Any, ...]],
+        cos_map: Mapping[Any, float],
+        sin_map: Mapping[Any, float],
+    ) -> None: ...
+
+class NeighborMeanWorkerArgs:
+    chunk: Sequence[Any]
+    neighbors_map: Mapping[Any, tuple[Any, ...]]
+    epi_map: Mapping[Any, float]
+
+    def __init__(
+        self,
+        chunk: Sequence[Any],
+        neighbors_map: Mapping[Any, tuple[Any, ...]],
+        epi_map: Mapping[Any, float],
+    ) -> None: ...
+
+def _rlocal_worker(args: RLocalWorkerArgs) -> list[float]: ...
+def _neighbor_mean_worker(args: NeighborMeanWorkerArgs) -> list[float | None]: ...
+def _state_from_thresholds(
+    Rloc: float, dnfr_n: float, cfg: Mapping[str, Any]
+) -> str: ...
+def _recommendation(state: str, cfg: Mapping[str, Any]) -> list[Any]: ...
+def _get_last_weights(
+    G: TNFRGraph,
+    hist: Mapping[str, Sequence[CoherenceMatrixPayload | None]],
+) -> tuple[CoherenceMatrixPayload | None, CoherenceMatrixPayload | None]: ...
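This stub pins precise types only on the multiprocessing-facing helpers; every other attribute resolves through the module-level `__getattr__`, the standard PEP 484 escape hatch for partially typed modules. A generic illustration of that pattern (file and function names here are hypothetical, not part of tnfr):

# partial_stub.pyi -- hypothetical stub using the same pattern
#   from typing import Any
#   def __getattr__(name: str) -> Any: ...   # untyped names fall back to Any
#   def typed_api(x: int) -> str: ...        # this one is fully checked

# client.py
import partial_stub

partial_stub.typed_api(3)        # type checker verifies int -> str
partial_stub.legacy_helper()     # accepted as Any via the module __getattr__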
tnfr/metrics/export.py
CHANGED
@@ -4,17 +4,23 @@ from __future__ import annotations

 import csv
 import math
-from
+from collections.abc import Iterable, Iterator, Sequence
+from itertools import tee, zip_longest
+from typing import Mapping, TextIO

+from ..config.constants import GLYPHS_CANONICAL
 from ..glyph_history import ensure_history
-from ..
-from ..
+from ..utils import json_dumps, safe_write
+from ..types import Graph, SigmaTrace
 from .core import glyphogram_series
-from ..json_utils import json_dumps


-def _write_csv(path, headers, rows):
-    def _write(f):
+def _write_csv(
+    path: str,
+    headers: Sequence[str],
+    rows: Iterable[Sequence[object]],
+) -> None:
+    def _write(f: TextIO) -> None:
         writer = csv.writer(f)
         writer.writerow(headers)
         for row in rows:
@@ -23,7 +29,9 @@ def _write_csv(path, headers, rows):
     safe_write(path, _write, newline="")


-def _iter_glif_rows(glyph):
+def _iter_glif_rows(
+    glyph: Mapping[str, Sequence[float]],
+) -> Iterator[list[float]]:
     ts = glyph.get("t", [])
     # Precompute columns for each glyph to avoid repeated lookups.
     # ``default_col`` is shared by reference for missing glyphs to prevent
@@ -34,7 +42,7 @@ def _iter_glif_rows(glyph):
         yield [t] + [col[i] for col in cols]


-def export_metrics(G, base_path: str, fmt: str = "csv") -> None:
+def export_metrics(G: Graph, base_path: str, fmt: str = "csv") -> None:
     """Dump glyphogram and σ(t) trace to compact CSV or JSON files."""
     hist = ensure_history(G)
     glyph = glyphogram_series(G)
@@ -53,7 +61,7 @@ def export_metrics(G, base_path: str, fmt: str = "csv") -> None:
             return 0
         return value

-    def _gen_rows():
+    def _gen_rows() -> Iterator[tuple[float, float, float, float, float]]:
         for i, (t, x, y, m, a) in enumerate(rows_raw):
             yield (
                 i if t is None else t,
@@ -65,7 +73,7 @@ def export_metrics(G, base_path: str, fmt: str = "csv") -> None:

     rows_csv, rows_sigma = tee(_gen_rows())

-    sigma:
+    sigma: SigmaTrace = {
         "t": [],
         "sigma_x": [],
         "sigma_y": [],
@@ -78,13 +86,13 @@ def export_metrics(G, base_path: str, fmt: str = "csv") -> None:
         sigma["sigma_y"].append(y)
         sigma["mag"].append(m)
         sigma["angle"].append(a)
-    morph = hist.get("morph", [])
-    epi_supp = hist.get("EPI_support", [])
+    morph: Sequence[Mapping[str, float]] = hist.get("morph", [])
+    epi_supp: Sequence[Mapping[str, float]] = hist.get("EPI_support", [])
     fmt = fmt.lower()
     if fmt not in {"csv", "json"}:
-        raise ValueError(f"
+        raise ValueError(f"Unsupported export format: {fmt}")
     if fmt == "csv":
-        specs = [
+        specs: list[tuple[str, Sequence[str], Iterable[Sequence[object]]]] = [
         (
             "_glyphogram.csv",
             ["t", *GLYPHS_CANONICAL],
@@ -134,4 +142,8 @@ def export_metrics(G, base_path: str, fmt: str = "csv") -> None:
         "epi_support": epi_supp,
     }
     json_path = base_path + ".json"
-
+
+    def _write_json(f: TextIO) -> None:
+        f.write(json_dumps(data))
+
+    safe_write(json_path, _write_json)
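A usage sketch for the retyped exporter: the `export_metrics` signature, the file suffixes, and the `fmt` validation come from the diff above, while the graph setup is an assumption (any graph that has accumulated tnfr history should do; networkx is assumed here).

import networkx as nx
from tnfr.metrics.export import export_metrics

G = nx.Graph()
# ... run a TNFR simulation so G's history holds glyphogram/sigma data ...

export_metrics(G, "out/run01")               # CSV (default): out/run01_glyphogram.csv, ...
export_metrics(G, "out/run01", fmt="json")   # single JSON file: out/run01.json
export_metrics(G, "out/run01", fmt="xml")    # raises ValueError: Unsupported export format: xml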
tnfr/metrics/glyph_timing.py
CHANGED
@@ -2,24 +2,74 @@

 from __future__ import annotations

+import math
 from collections import Counter, defaultdict
+from concurrent.futures import ProcessPoolExecutor
 from dataclasses import dataclass
-from
+from types import ModuleType
+from typing import (
+    Any,
+    Callable,
+    Mapping,
+    MutableMapping,
+    Sequence,
+    cast,
+)

 from ..alias import get_attr
-from ..constants import
-from ..
-from ..
-from ..
+from ..config.constants import GLYPH_GROUPS, GLYPHS_CANONICAL
+from ..constants import get_param
+from ..constants.aliases import ALIAS_EPI
+from ..glyph_history import append_metric
+from ..glyph_runtime import last_glyph
+from ..utils import resolve_chunk_size
+from ..types import (
+    GlyphCounts,
+    GlyphMetricsHistory,
+    GlyphMetricsHistoryValue,
+    GlyphTimingByNode,
+    GlyphTimingTotals,
+    GlyphogramRow,
+    GraphLike,
+    MetricsListHistory,
+    SigmaTrace,
+)
+
+LATENT_GLYPH: str = "SHA"
+DEFAULT_EPI_SUPPORT_LIMIT = 0.05

-
+try:  # pragma: no cover - import guard exercised via tests
+    import numpy as _np  # type: ignore[import-not-found]
+except Exception:  # pragma: no cover - numpy optional dependency
+    _np = None
+
+np: ModuleType | None = cast(ModuleType | None, _np)
+
+
+def _has_numpy_support(np_obj: object) -> bool:
+    """Return ``True`` when ``np_obj`` exposes the required NumPy API."""
+
+    return isinstance(np_obj, ModuleType) or (
+        np_obj is not None
+        and hasattr(np_obj, "fromiter")
+        and hasattr(np_obj, "bincount")
+    )
+_GLYPH_TO_INDEX = {glyph: idx for idx, glyph in enumerate(GLYPHS_CANONICAL)}

-
-
+
+def _coerce_float(value: Any) -> float:
+    """Attempt to coerce ``value`` to ``float`` returning ``0.0`` on failure."""
+
+    try:
+        return float(value)
+    except (TypeError, ValueError):
+        return 0.0


 @dataclass
 class GlyphTiming:
+    """Mutable accumulator tracking the active glyph and its dwell time."""
+
     curr: str | None = None
     run: float = 0.0

@@ -27,6 +77,10 @@ class GlyphTiming:
 __all__ = [
     "LATENT_GLYPH",
     "GlyphTiming",
+    "SigmaTrace",
+    "GlyphogramRow",
+    "GlyphTimingTotals",
+    "GlyphTimingByNode",
     "_tg_state",
     "for_each_glyph",
     "_update_tg_node",
@@ -44,13 +98,34 @@ __all__ = [
 # ---------------------------------------------------------------------------


-def _tg_state(nd):
+def _count_glyphs_chunk(chunk: Sequence[str]) -> Counter[str]:
+    """Count glyph occurrences within a chunk (multiprocessing helper)."""
+
+    counter: Counter[str] = Counter()
+    for glyph in chunk:
+        counter[glyph] += 1
+    return counter
+
+
+def _epi_support_chunk(values: Sequence[float], threshold: float) -> tuple[float, int]:
+    """Compute EPI support contribution for a chunk."""
+
+    total = 0.0
+    count = 0
+    for value in values:
+        if value >= threshold:
+            total += value
+            count += 1
+    return total, count
+
+
+def _tg_state(nd: MutableMapping[str, Any]) -> GlyphTiming:
     """Expose per-node glyph timing state."""

     return nd.setdefault("_Tg", GlyphTiming())


-def for_each_glyph(fn: Callable[[str], Any]) -> None:
+def for_each_glyph(fn: Callable[[str], None]) -> None:
     """Apply ``fn`` to each canonical structural operator."""

     for g in GLYPHS_CANONICAL:
@@ -62,7 +137,13 @@ def for_each_glyph(fn: Callable[[str], Any]) -> None:
 # ---------------------------------------------------------------------------


-def _update_tg_node(n, nd, dt, tg_total, tg_by_node):
+def _update_tg_node(
+    n: Any,
+    nd: MutableMapping[str, Any],
+    dt: float,
+    tg_total: GlyphTimingTotals,
+    tg_by_node: GlyphTimingByNode | None,
+) -> tuple[str | None, bool]:
     """Track a node's glyph transition and accumulate run time."""

     g = last_glyph(nd)
@@ -85,19 +166,31 @@ def _update_tg_node(n, nd, dt, tg_total, tg_by_node):
     return g, g == LATENT_GLYPH


-def _update_tg(G, hist, dt, save_by_node: bool):
+def _update_tg(
+    G: GraphLike,
+    hist: GlyphMetricsHistory,
+    dt: float,
+    save_by_node: bool,
+    n_jobs: int | None = None,
+) -> tuple[Counter[str], int, int]:
     """Accumulate glyph dwell times for the entire graph."""

-
-    tg_total = hist.setdefault("Tg_total", defaultdict(float))
+    tg_total = cast(GlyphTimingTotals, hist.setdefault("Tg_total", defaultdict(float)))
     tg_by_node = (
-
+        cast(
+            GlyphTimingByNode,
+            hist.setdefault(
+                "Tg_by_node",
+                defaultdict(lambda: defaultdict(list)),
+            ),
+        )
         if save_by_node
         else None
     )

     n_total = 0
     n_latent = 0
+    glyph_sequence: list[str] = []
     for n, nd in G.nodes(data=True):
         g, is_latent = _update_tg_node(n, nd, dt, tg_total, tg_by_node)
         if g is None:
@@ -105,56 +198,152 @@ def _update_tg(G, hist, dt, save_by_node: bool):
         n_total += 1
         if is_latent:
             n_latent += 1
-
+        glyph_sequence.append(g)
+
+    counts: Counter[str] = Counter()
+    if not glyph_sequence:
+        return counts, n_total, n_latent
+
+    if _has_numpy_support(np):
+        glyph_idx = np.fromiter(
+            (_GLYPH_TO_INDEX[glyph] for glyph in glyph_sequence),
+            dtype=np.int64,
+            count=len(glyph_sequence),
+        )
+        freq = np.bincount(glyph_idx, minlength=len(GLYPHS_CANONICAL))
+        counts.update(
+            {
+                glyph: int(freq[_GLYPH_TO_INDEX[glyph]])
+                for glyph in GLYPHS_CANONICAL
+                if freq[_GLYPH_TO_INDEX[glyph]]
+            }
+        )
+    elif n_jobs is not None and n_jobs > 1 and len(glyph_sequence) > 1:
+        approx_chunk = math.ceil(len(glyph_sequence) / n_jobs) if n_jobs else None
+        chunk_size = resolve_chunk_size(
+            approx_chunk,
+            len(glyph_sequence),
+            minimum=1,
+        )
+        futures = []
+        with ProcessPoolExecutor(max_workers=n_jobs) as executor:
+            for start in range(0, len(glyph_sequence), chunk_size):
+                chunk = glyph_sequence[start : start + chunk_size]
+                futures.append(executor.submit(_count_glyphs_chunk, chunk))
+            for future in futures:
+                counts.update(future.result())
+    else:
+        counts.update(glyph_sequence)
+
     return counts, n_total, n_latent


-def _update_glyphogram(G, hist, counts, t, n_total):
+def _update_glyphogram(
+    G: GraphLike,
+    hist: GlyphMetricsHistory,
+    counts: GlyphCounts,
+    t: float,
+    n_total: int,
+) -> None:
     """Record glyphogram row from glyph counts."""

     normalize_series = bool(get_param(G, "METRICS").get("normalize_series", False))
-    row = {"t": t}
+    row: GlyphogramRow = {"t": t}
     total = max(1, n_total)
     for g in GLYPHS_CANONICAL:
         c = counts.get(g, 0)
         row[g] = (c / total) if normalize_series else c
-    append_metric(hist, "glyphogram", row)
+    append_metric(cast(MetricsListHistory, hist), "glyphogram", row)


-def _update_latency_index(G, hist, n_total, n_latent, t):
+def _update_latency_index(
+    G: GraphLike,
+    hist: GlyphMetricsHistory,
+    n_total: int,
+    n_latent: int,
+    t: float,
+) -> None:
     """Record latency index for the current step."""

     li = n_latent / max(1, n_total)
-    append_metric(
+    append_metric(
+        cast(MetricsListHistory, hist),
+        "latency_index",
+        {"t": t, "value": li},
+    )


 def _update_epi_support(
-    G,
-    hist,
-    t,
+    G: GraphLike,
+    hist: GlyphMetricsHistory,
+    t: float,
     threshold: float = DEFAULT_EPI_SUPPORT_LIMIT,
-
+    n_jobs: int | None = None,
+) -> None:
     """Measure EPI support and normalized magnitude."""

+    node_count = G.number_of_nodes()
     total = 0.0
     count = 0
-
-
-
-
-
+
+    if _has_numpy_support(np) and node_count:
+        epi_values = np.fromiter(
+            (
+                abs(_coerce_float(get_attr(nd, ALIAS_EPI, 0.0)))
+                for _, nd in G.nodes(data=True)
+            ),
+            dtype=float,
+            count=node_count,
+        )
+        mask = epi_values >= threshold
+        count = int(mask.sum())
+        if count:
+            total = float(epi_values[mask].sum())
+    elif n_jobs is not None and n_jobs > 1 and node_count > 1:
+        values = [
+            abs(_coerce_float(get_attr(nd, ALIAS_EPI, 0.0)))
+            for _, nd in G.nodes(data=True)
+        ]
+        approx_chunk = math.ceil(len(values) / n_jobs) if n_jobs else None
+        chunk_size = resolve_chunk_size(
+            approx_chunk,
+            len(values),
+            minimum=1,
+        )
+        totals: list[tuple[float, int]] = []
+        with ProcessPoolExecutor(max_workers=n_jobs) as executor:
+            futures = []
+            for start in range(0, len(values), chunk_size):
+                chunk = values[start : start + chunk_size]
+                futures.append(executor.submit(_epi_support_chunk, chunk, threshold))
+            for future in futures:
+                totals.append(future.result())
+        for part_total, part_count in totals:
+            total += part_total
+            count += part_count
+    else:
+        for _, nd in G.nodes(data=True):
+            epi_val = abs(_coerce_float(get_attr(nd, ALIAS_EPI, 0.0)))
+            if epi_val >= threshold:
+                total += epi_val
+                count += 1
     epi_norm = (total / count) if count else 0.0
     append_metric(
-        hist,
+        cast(MetricsListHistory, hist),
         "EPI_support",
         {"t": t, "size": count, "epi_norm": float(epi_norm)},
     )


-def _update_morph_metrics(G, hist, counts, t):
+def _update_morph_metrics(
+    G: GraphLike,
+    hist: GlyphMetricsHistory,
+    counts: GlyphCounts,
+    t: float,
+) -> None:
     """Capture morphosyntactic distribution of glyphs."""

-    def get_count(keys):
+    def get_count(keys: Sequence[str]) -> int:
         return sum(counts.get(k, 0) for k in keys)

     total = max(1, sum(counts.values()))
@@ -165,25 +354,26 @@ def _update_morph_metrics(G, hist, counts, t):
     den = get_count(GLYPH_GROUPS.get("PP_den", ()))
     pp_val = 0.0 if den == 0 else num / den
     append_metric(
-        hist,
+        cast(MetricsListHistory, hist),
         "morph",
         {"t": t, "ID": id_val, "CM": cm_val, "NE": ne_val, "PP": pp_val},
     )


 def _compute_advanced_metrics(
-    G,
-    hist,
-    t,
-    dt,
-    cfg,
+    G: GraphLike,
+    hist: GlyphMetricsHistory,
+    t: float,
+    dt: float,
+    cfg: Mapping[str, Any],
     threshold: float = DEFAULT_EPI_SUPPORT_LIMIT,
-
+    n_jobs: int | None = None,
+) -> None:
     """Compute glyph timing derived metrics."""

     save_by_node = bool(cfg.get("save_by_node", True))
-    counts, n_total, n_latent = _update_tg(G, hist, dt, save_by_node)
+    counts, n_total, n_latent = _update_tg(G, hist, dt, save_by_node, n_jobs=n_jobs)
     _update_glyphogram(G, hist, counts, t, n_total)
     _update_latency_index(G, hist, n_total, n_latent, t)
-    _update_epi_support(G, hist, t, threshold)
+    _update_epi_support(G, hist, t, threshold, n_jobs=n_jobs)
     _update_morph_metrics(G, hist, counts, t)
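The reworked `_update_tg` dispatches between three counting paths: vectorized `np.bincount` when NumPy is usable, chunked `ProcessPoolExecutor` counting when `n_jobs > 1`, and a plain `Counter` otherwise. A self-contained sketch of the same dispatch, with a stand-in alphabet instead of tnfr's canonical glyphs and `math.ceil` in place of `resolve_chunk_size`:

# Illustrative sketch of the three counting paths; not tnfr's actual code.
import math
from collections import Counter
from concurrent.futures import ProcessPoolExecutor

GLYPHS = ("AL", "EN", "SHA")                 # stand-in for GLYPHS_CANONICAL
INDEX = {g: i for i, g in enumerate(GLYPHS)}

def _count_chunk(chunk):
    return Counter(chunk)

def count_glyphs(seq, np=None, n_jobs=None):
    counts = Counter()
    if not seq:
        return counts
    if np is not None:
        # Vectorized path: map glyphs to indices, histogram with bincount.
        idx = np.fromiter((INDEX[g] for g in seq), dtype=np.int64, count=len(seq))
        freq = np.bincount(idx, minlength=len(GLYPHS))
        counts.update({g: int(freq[INDEX[g]]) for g in GLYPHS if freq[INDEX[g]]})
    elif n_jobs and n_jobs > 1 and len(seq) > 1:
        # Chunked multiprocessing path: count per chunk, merge the Counters.
        size = math.ceil(len(seq) / n_jobs)
        with ProcessPoolExecutor(max_workers=n_jobs) as ex:
            futures = [ex.submit(_count_chunk, seq[i:i + size])
                       for i in range(0, len(seq), size)]
            for fut in futures:
                counts.update(fut.result())
    else:
        counts.update(seq)                   # sequential fallback
    return counts

print(count_glyphs(["AL", "SHA", "AL"]))     # Counter({'AL': 2, 'SHA': 1})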
tnfr/metrics/reporting.py
CHANGED
@@ -2,13 +2,14 @@

 from __future__ import annotations

-from
-
+from collections.abc import Sequence
 from heapq import nlargest
-from statistics import
+from statistics import StatisticsError, fmean, mean
+from typing import Any

 from ..glyph_history import ensure_history
 from ..sense import sigma_rose
+from ..types import NodeId, TNFRGraph
 from .glyph_timing import for_each_glyph

 __all__ = [
@@ -26,7 +27,7 @@ __all__ = [
 # ---------------------------------------------------------------------------


-def Tg_global(G, normalize: bool = True) -> dict[str, float]:
+def Tg_global(G: TNFRGraph, normalize: bool = True) -> dict[str, float]:
     """Total glyph dwell time per class."""

     hist = ensure_history(G)
@@ -34,7 +35,7 @@ def Tg_global(G, normalize: bool = True) -> dict[str, float]:
     total = sum(tg_total.values()) or 1.0
     out: dict[str, float] = {}

-    def add(g):
+    def add(g: str) -> None:
         val = float(tg_total.get(g, 0.0))
         out[g] = val / total if normalize else val

@@ -42,30 +43,34 @@ def Tg_global(G, normalize: bool = True) -> dict[str, float]:
     return out


-def Tg_by_node(
+def Tg_by_node(
+    G: TNFRGraph, n: NodeId, normalize: bool = False
+) -> dict[str, float] | dict[str, list[float]]:
     """Per-node glyph dwell summary."""

     hist = ensure_history(G)
     rec = hist.get("Tg_by_node", {}).get(n, {})
     if not normalize:
-
+        runs_out: dict[str, list[float]] = {}

-        def copy_runs(g):
-
+        def copy_runs(g: str) -> None:
+            runs_out[g] = list(rec.get(g, []))

         for_each_glyph(copy_runs)
-        return
-
+        return runs_out
+    mean_out: dict[str, float] = {}

-    def add(g):
+    def add(g: str) -> None:
         runs = rec.get(g, [])
-
+        mean_out[g] = float(mean(runs)) if runs else 0.0

     for_each_glyph(add)
-    return
+    return mean_out
+

+def latency_series(G: TNFRGraph) -> dict[str, list[float]]:
+    """Return latency samples as ``{"t": [...], "value": [...]}``."""

-def latency_series(G) -> dict[str, list[float]]:
     hist = ensure_history(G)
     xs = hist.get("latency_index", [])
     return {
@@ -74,21 +79,25 @@ def latency_series(G) -> dict[str, list[float]]:
     }


-def glyphogram_series(G) -> dict[str, list[float]]:
+def glyphogram_series(G: TNFRGraph) -> dict[str, list[float]]:
+    """Return glyphogram time series keyed by glyph label."""
+
     hist = ensure_history(G)
     xs = hist.get("glyphogram", [])
     if not xs:
         return {"t": []}
-    out: dict[str, list[float]] = {
+    out: dict[str, list[float]] = {
+        "t": [float(x.get("t", i)) for i, x in enumerate(xs)]
+    }

-    def add(g):
+    def add(g: str) -> None:
         out[g] = [float(x.get(g, 0.0)) for x in xs]

     for_each_glyph(add)
     return out


-def glyph_top(G, k: int = 3) -> list[tuple[str, float]]:
+def glyph_top(G: TNFRGraph, k: int = 3) -> list[tuple[str, float]]:
     """Top-k structural operators by ``Tg_global`` fraction."""

     k = int(k)
@@ -99,8 +108,10 @@ def glyph_top(G, k: int = 3) -> list[tuple[str, float]]:


 def build_metrics_summary(
-    G, *, series_limit: int | None = None
-) -> tuple[
+    G: TNFRGraph, *, series_limit: int | None = None
+) -> tuple[
+    dict[str, float | dict[str, float] | dict[str, list[float]] | dict[str, int]], bool
+]:
     """Collect a compact metrics summary for CLI reporting.

     Parameters
@@ -131,7 +142,7 @@ def build_metrics_summary(
     if limit <= 0:
         limit = None

-    def _trim(values:
+    def _trim(values: Sequence[Any]) -> list[Any]:
         seq = list(values)
         if limit is None:
             return seq