tnfr-4.5.2-py3-none-any.whl → tnfr-7.0.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of tnfr might be problematic.
- tnfr/__init__.py +275 -51
- tnfr/__init__.pyi +33 -0
- tnfr/_compat.py +10 -0
- tnfr/_generated_version.py +34 -0
- tnfr/_version.py +49 -0
- tnfr/_version.pyi +7 -0
- tnfr/alias.py +117 -31
- tnfr/alias.pyi +108 -0
- tnfr/cache.py +6 -572
- tnfr/cache.pyi +16 -0
- tnfr/callback_utils.py +16 -38
- tnfr/callback_utils.pyi +79 -0
- tnfr/cli/__init__.py +34 -14
- tnfr/cli/__init__.pyi +26 -0
- tnfr/cli/arguments.py +211 -28
- tnfr/cli/arguments.pyi +27 -0
- tnfr/cli/execution.py +470 -50
- tnfr/cli/execution.pyi +70 -0
- tnfr/cli/utils.py +18 -3
- tnfr/cli/utils.pyi +8 -0
- tnfr/config/__init__.py +13 -0
- tnfr/config/__init__.pyi +10 -0
- tnfr/{constants_glyphs.py → config/constants.py} +26 -20
- tnfr/config/constants.pyi +12 -0
- tnfr/config/feature_flags.py +83 -0
- tnfr/{config.py → config/init.py} +11 -7
- tnfr/config/init.pyi +8 -0
- tnfr/config/operator_names.py +93 -0
- tnfr/config/operator_names.pyi +28 -0
- tnfr/config/presets.py +84 -0
- tnfr/config/presets.pyi +7 -0
- tnfr/constants/__init__.py +80 -29
- tnfr/constants/__init__.pyi +92 -0
- tnfr/constants/aliases.py +31 -0
- tnfr/constants/core.py +4 -4
- tnfr/constants/core.pyi +17 -0
- tnfr/constants/init.py +1 -1
- tnfr/constants/init.pyi +12 -0
- tnfr/constants/metric.py +7 -15
- tnfr/constants/metric.pyi +19 -0
- tnfr/dynamics/__init__.py +165 -633
- tnfr/dynamics/__init__.pyi +82 -0
- tnfr/dynamics/adaptation.py +267 -0
- tnfr/dynamics/aliases.py +23 -0
- tnfr/dynamics/coordination.py +385 -0
- tnfr/dynamics/dnfr.py +2283 -400
- tnfr/dynamics/dnfr.pyi +24 -0
- tnfr/dynamics/integrators.py +406 -98
- tnfr/dynamics/integrators.pyi +34 -0
- tnfr/dynamics/runtime.py +881 -0
- tnfr/dynamics/sampling.py +10 -5
- tnfr/dynamics/sampling.pyi +7 -0
- tnfr/dynamics/selectors.py +719 -0
- tnfr/execution.py +70 -48
- tnfr/execution.pyi +45 -0
- tnfr/flatten.py +13 -9
- tnfr/flatten.pyi +21 -0
- tnfr/gamma.py +66 -53
- tnfr/gamma.pyi +34 -0
- tnfr/glyph_history.py +110 -52
- tnfr/glyph_history.pyi +35 -0
- tnfr/glyph_runtime.py +16 -0
- tnfr/glyph_runtime.pyi +9 -0
- tnfr/immutable.py +69 -28
- tnfr/immutable.pyi +34 -0
- tnfr/initialization.py +16 -16
- tnfr/initialization.pyi +65 -0
- tnfr/io.py +6 -240
- tnfr/io.pyi +16 -0
- tnfr/locking.pyi +7 -0
- tnfr/mathematics/__init__.py +81 -0
- tnfr/mathematics/backend.py +426 -0
- tnfr/mathematics/dynamics.py +398 -0
- tnfr/mathematics/epi.py +254 -0
- tnfr/mathematics/generators.py +222 -0
- tnfr/mathematics/metrics.py +119 -0
- tnfr/mathematics/operators.py +233 -0
- tnfr/mathematics/operators_factory.py +71 -0
- tnfr/mathematics/projection.py +78 -0
- tnfr/mathematics/runtime.py +173 -0
- tnfr/mathematics/spaces.py +247 -0
- tnfr/mathematics/transforms.py +292 -0
- tnfr/metrics/__init__.py +10 -10
- tnfr/metrics/__init__.pyi +20 -0
- tnfr/metrics/coherence.py +993 -324
- tnfr/metrics/common.py +23 -16
- tnfr/metrics/common.pyi +46 -0
- tnfr/metrics/core.py +251 -35
- tnfr/metrics/core.pyi +13 -0
- tnfr/metrics/diagnosis.py +708 -111
- tnfr/metrics/diagnosis.pyi +85 -0
- tnfr/metrics/export.py +27 -15
- tnfr/metrics/glyph_timing.py +232 -42
- tnfr/metrics/reporting.py +33 -22
- tnfr/metrics/reporting.pyi +12 -0
- tnfr/metrics/sense_index.py +987 -43
- tnfr/metrics/sense_index.pyi +9 -0
- tnfr/metrics/trig.py +214 -23
- tnfr/metrics/trig.pyi +13 -0
- tnfr/metrics/trig_cache.py +115 -22
- tnfr/metrics/trig_cache.pyi +10 -0
- tnfr/node.py +542 -136
- tnfr/node.pyi +178 -0
- tnfr/observers.py +152 -35
- tnfr/observers.pyi +31 -0
- tnfr/ontosim.py +23 -19
- tnfr/ontosim.pyi +28 -0
- tnfr/operators/__init__.py +601 -82
- tnfr/operators/__init__.pyi +45 -0
- tnfr/operators/definitions.py +513 -0
- tnfr/operators/definitions.pyi +78 -0
- tnfr/operators/grammar.py +760 -0
- tnfr/operators/jitter.py +107 -38
- tnfr/operators/jitter.pyi +11 -0
- tnfr/operators/registry.py +75 -0
- tnfr/operators/registry.pyi +13 -0
- tnfr/operators/remesh.py +149 -88
- tnfr/py.typed +0 -0
- tnfr/rng.py +46 -143
- tnfr/rng.pyi +14 -0
- tnfr/schemas/__init__.py +8 -0
- tnfr/schemas/grammar.json +94 -0
- tnfr/selector.py +25 -19
- tnfr/selector.pyi +19 -0
- tnfr/sense.py +72 -62
- tnfr/sense.pyi +23 -0
- tnfr/structural.py +522 -262
- tnfr/structural.pyi +69 -0
- tnfr/telemetry/__init__.py +35 -0
- tnfr/telemetry/cache_metrics.py +226 -0
- tnfr/telemetry/nu_f.py +423 -0
- tnfr/telemetry/nu_f.pyi +123 -0
- tnfr/telemetry/verbosity.py +37 -0
- tnfr/tokens.py +1 -3
- tnfr/tokens.pyi +36 -0
- tnfr/trace.py +270 -113
- tnfr/trace.pyi +40 -0
- tnfr/types.py +574 -6
- tnfr/types.pyi +331 -0
- tnfr/units.py +69 -0
- tnfr/units.pyi +16 -0
- tnfr/utils/__init__.py +217 -0
- tnfr/utils/__init__.pyi +202 -0
- tnfr/utils/cache.py +2395 -0
- tnfr/utils/cache.pyi +468 -0
- tnfr/utils/chunks.py +104 -0
- tnfr/utils/chunks.pyi +21 -0
- tnfr/{collections_utils.py → utils/data.py} +147 -90
- tnfr/utils/data.pyi +64 -0
- tnfr/utils/graph.py +85 -0
- tnfr/utils/graph.pyi +10 -0
- tnfr/utils/init.py +770 -0
- tnfr/utils/init.pyi +78 -0
- tnfr/utils/io.py +456 -0
- tnfr/{helpers → utils}/numeric.py +51 -24
- tnfr/utils/numeric.pyi +21 -0
- tnfr/validation/__init__.py +113 -0
- tnfr/validation/__init__.pyi +77 -0
- tnfr/validation/compatibility.py +95 -0
- tnfr/validation/compatibility.pyi +6 -0
- tnfr/validation/grammar.py +71 -0
- tnfr/validation/grammar.pyi +40 -0
- tnfr/validation/graph.py +138 -0
- tnfr/validation/graph.pyi +17 -0
- tnfr/validation/rules.py +281 -0
- tnfr/validation/rules.pyi +55 -0
- tnfr/validation/runtime.py +263 -0
- tnfr/validation/runtime.pyi +31 -0
- tnfr/validation/soft_filters.py +170 -0
- tnfr/validation/soft_filters.pyi +37 -0
- tnfr/validation/spectral.py +159 -0
- tnfr/validation/spectral.pyi +46 -0
- tnfr/validation/syntax.py +40 -0
- tnfr/validation/syntax.pyi +10 -0
- tnfr/validation/window.py +39 -0
- tnfr/validation/window.pyi +1 -0
- tnfr/viz/__init__.py +9 -0
- tnfr/viz/matplotlib.py +246 -0
- tnfr-7.0.0.dist-info/METADATA +179 -0
- tnfr-7.0.0.dist-info/RECORD +185 -0
- {tnfr-4.5.2.dist-info → tnfr-7.0.0.dist-info}/licenses/LICENSE.md +1 -1
- tnfr/grammar.py +0 -344
- tnfr/graph_utils.py +0 -84
- tnfr/helpers/__init__.py +0 -71
- tnfr/import_utils.py +0 -228
- tnfr/json_utils.py +0 -162
- tnfr/logging_utils.py +0 -116
- tnfr/presets.py +0 -60
- tnfr/validators.py +0 -84
- tnfr/value_utils.py +0 -59
- tnfr-4.5.2.dist-info/METADATA +0 -379
- tnfr-4.5.2.dist-info/RECORD +0 -67
- {tnfr-4.5.2.dist-info → tnfr-7.0.0.dist-info}/WHEEL +0 -0
- {tnfr-4.5.2.dist-info → tnfr-7.0.0.dist-info}/entry_points.txt +0 -0
- {tnfr-4.5.2.dist-info → tnfr-7.0.0.dist-info}/top_level.txt +0 -0
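Much of the 4.5.2 → 7.0.0 restructuring consolidates standalone helper modules into the new tnfr/utils package (for example tnfr/collections_utils.py becomes tnfr/utils/data.py and tnfr/helpers/numeric.py becomes tnfr/utils/numeric.py), while modules such as tnfr/graph_utils.py and tnfr/logging_utils.py are deleted. A minimal sketch of how downstream code could bridge the two layouts; only the supports_add_edge re-export is confirmed by the node.py diff below, any other helper your code imports should be checked against tnfr.utils directly:

# Hedged compatibility sketch: supports_add_edge moves from tnfr.graph_utils (4.5.2)
# to tnfr.utils (7.0.0), as the node.py import changes below show.
try:
    from tnfr.utils import supports_add_edge  # 7.0.0 consolidated utils package
except ImportError:
    from tnfr.graph_utils import supports_add_edge  # 4.5.2 standalone module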
tnfr/node.py
CHANGED
@@ -1,118 +1,191 @@
 """Node utilities and structures for TNFR graphs."""

 from __future__ import annotations
-
-
+
+import copy
 import math
+from collections.abc import Hashable
+from dataclasses import dataclass
+from typing import (
+    Any,
+    Callable,
+    Iterable,
+    Mapping,
+    MutableMapping,
+    Optional,
+    Protocol,
+    Sequence,
+    SupportsFloat,
+    TypeVar,
+)
+
+import numpy as np

-from .constants import get_aliases
 from .alias import (
     get_attr,
     get_attr_str,
+    get_theta_attr,
     set_attr,
     set_attr_str,
-
+    set_attr_generic,
     set_dnfr,
     set_theta,
+    set_vf,
+)
+from .config import context_flags, get_flags
+from .constants.aliases import (
+    ALIAS_D2EPI,
+    ALIAS_DNFR,
+    ALIAS_EPI,
+    ALIAS_EPI_KIND,
+    ALIAS_SI,
+    ALIAS_THETA,
+    ALIAS_VF,
 )
-from .
+from .mathematics import (
+    BasicStateProjector,
+    CoherenceOperator,
+    FrequencyOperator,
+    HilbertSpace,
+    NFRValidator,
+    StateProjector,
+)
+from .mathematics.operators_factory import make_coherence_operator, make_frequency_operator
+from .mathematics.runtime import (
+    coherence as runtime_coherence,
+    frequency_positive as runtime_frequency_positive,
+    normalized as runtime_normalized,
+    stable_unitary as runtime_stable_unitary,
+)
+from .locking import get_lock
+from .types import (
+    CouplingWeight,
+    DeltaNFR,
+    EPIValue,
+    NodeId,
+    Phase,
+    SecondDerivativeEPI,
+    SenseIndex,
+    StructuralFrequency,
+    TNFRGraph,
+    ZERO_BEPI_STORAGE,
+    ensure_bepi,
+    serialize_bepi,
+)
+from .utils import (
     cached_node_list,
     ensure_node_offset_map,
+    get_logger,
     increment_edge_version,
+    supports_add_edge,
 )
-from .graph_utils import supports_add_edge
-from .locking import get_lock

-
-ALIAS_VF = get_aliases("VF")
-ALIAS_THETA = get_aliases("THETA")
-ALIAS_SI = get_aliases("SI")
-ALIAS_EPI_KIND = get_aliases("EPI_KIND")
-ALIAS_DNFR = get_aliases("DNFR")
-ALIAS_D2EPI = get_aliases("D2EPI")
+T = TypeVar("T")

-
-# descriptors. Each entry defines the keyword arguments passed to
-# ``_nx_attr_property`` for a given attribute name.
-ATTR_SPECS: dict[str, dict] = {
-    "EPI": {"aliases": ALIAS_EPI},
-    "vf": {
-        "aliases": ALIAS_VF,
-        "setter": set_vf,
-        "use_graph_setter": True,
-    },
-    "theta": {
-        "aliases": ALIAS_THETA,
-        "setter": set_theta,
-        "use_graph_setter": True,
-    },
-    "Si": {"aliases": ALIAS_SI},
-    "epi_kind": {
-        "aliases": ALIAS_EPI_KIND,
-        "default": "",
-        "getter": get_attr_str,
-        "setter": set_attr_str,
-        "to_python": str,
-        "to_storage": str,
-    },
-    "dnfr": {
-        "aliases": ALIAS_DNFR,
-        "setter": set_dnfr,
-        "use_graph_setter": True,
-    },
-    "d2EPI": {"aliases": ALIAS_D2EPI},
-}
+__all__ = ("NodeNX", "NodeProtocol", "add_edge")

-T = TypeVar("T")

-
-
-
-
-
-
-
-
-
-
-    to_storage=float,
-    use_graph_setter=False,
-):
-    """Generate ``NodoNX`` property descriptors.
-
-    Parameters
-    ----------
-    aliases:
-        Immutable tuple of aliases used to access the attribute in the
-        underlying ``networkx`` node.
-    default:
-        Value returned when the attribute is missing.
-    getter, setter:
-        Helper functions used to retrieve or store the value. ``setter`` can
-        either accept ``(mapping, aliases, value)`` or, when
-        ``use_graph_setter`` is ``True``, ``(G, n, value)``.
-    to_python, to_storage:
-        Conversion helpers applied when getting or setting the value,
-        respectively.
-    use_graph_setter:
-        Whether ``setter`` expects ``(G, n, value)`` instead of
-        ``(mapping, aliases, value)``.
+LOGGER = get_logger(__name__)
+
+
+@dataclass(frozen=True)
+class AttrSpec:
+    """Configuration required to expose a ``networkx`` node attribute.
+
+    ``AttrSpec`` mirrors the defaults previously used by
+    :func:`_nx_attr_property` and centralises the descriptor generation
+    logic to keep a single source of truth for NodeNX attribute access.
     """

-
-
+    aliases: tuple[str, ...]
+    default: Any = 0.0
+    getter: Callable[[MutableMapping[str, Any], tuple[str, ...], Any], Any] = get_attr
+    setter: Callable[..., None] = set_attr
+    to_python: Callable[[Any], Any] = float
+    to_storage: Callable[[Any], Any] = float
+    use_graph_setter: bool = False
+
+    def build_property(self) -> property:
+        """Create the property descriptor for ``NodeNX`` attributes."""
+
+        def fget(instance: "NodeNX") -> T:
+            return self.to_python(
+                self.getter(instance.G.nodes[instance.n], self.aliases, self.default)
+            )
+
+        def fset(instance: "NodeNX", value: T) -> None:
+            value = self.to_storage(value)
+            if self.use_graph_setter:
+                self.setter(instance.G, instance.n, value)
+            else:
+                self.setter(instance.G.nodes[instance.n], self.aliases, value)
+
+        return property(fget, fset)
+
+
+# Canonical adapters for BEPI storage ------------------------------------
+
+
+def _epi_to_python(value: Any) -> EPIValue:
+    if value is None:
+        raise ValueError("EPI attribute is required for BEPI nodes")
+    return ensure_bepi(value)
+
+
+def _epi_to_storage(value: Any) -> Mapping[str, tuple[complex, ...] | tuple[float, ...]]:
+    return serialize_bepi(value)

-    def fset(self, value: T) -> None:
-        value = to_storage(value)
-        if use_graph_setter:
-            setter(self.G, self.n, value)
-        else:
-            setter(self.G.nodes[self.n], aliases, value)

-
+def _get_bepi_attr(
+    mapping: Mapping[str, Any], aliases: tuple[str, ...], default: Any
+) -> Any:
+    return get_attr(mapping, aliases, default, conv=lambda obj: obj)


-def
+def _set_bepi_attr(
+    mapping: MutableMapping[str, Any], aliases: tuple[str, ...], value: Any
+) -> Mapping[str, tuple[complex, ...] | tuple[float, ...]]:
+    return set_attr_generic(mapping, aliases, value, conv=lambda obj: obj)
+
+
+# Mapping of NodeNX attribute specifications used to generate property
+# descriptors. Each entry defines the keyword arguments passed to
+# ``AttrSpec.build_property`` for a given attribute name.
+ATTR_SPECS: dict[str, AttrSpec] = {
+    "EPI": AttrSpec(
+        aliases=ALIAS_EPI,
+        default=ZERO_BEPI_STORAGE,
+        getter=_get_bepi_attr,
+        to_python=_epi_to_python,
+        to_storage=_epi_to_storage,
+        setter=_set_bepi_attr,
+    ),
+    "vf": AttrSpec(aliases=ALIAS_VF, setter=set_vf, use_graph_setter=True),
+    "theta": AttrSpec(
+        aliases=ALIAS_THETA,
+        getter=lambda mapping, _aliases, default: get_theta_attr(mapping, default),
+        setter=set_theta,
+        use_graph_setter=True,
+    ),
+    "Si": AttrSpec(aliases=ALIAS_SI),
+    "epi_kind": AttrSpec(
+        aliases=ALIAS_EPI_KIND,
+        default="",
+        getter=get_attr_str,
+        setter=set_attr_str,
+        to_python=str,
+        to_storage=str,
+    ),
+    "dnfr": AttrSpec(aliases=ALIAS_DNFR, setter=set_dnfr, use_graph_setter=True),
+    "d2EPI": AttrSpec(aliases=ALIAS_D2EPI),
+}
+
+
+def _add_edge_common(
+    n1: NodeId,
+    n2: NodeId,
+    weight: CouplingWeight | SupportsFloat | str,
+) -> Optional[CouplingWeight]:
     """Validate basic edge constraints.

     Returns the parsed weight if the edge can be added. ``None`` is returned

@@ -132,12 +205,12 @@ def _add_edge_common(n1, n2, weight) -> Optional[float]:


 def add_edge(
-    graph,
-    n1,
-    n2,
-    weight,
+    graph: TNFRGraph,
+    n1: NodeId,
+    n2: NodeId,
+    weight: CouplingWeight | SupportsFloat | str,
     overwrite: bool = False,
-):
+) -> None:
     """Add an edge between ``n1`` and ``n2`` in a ``networkx`` graph."""

     weight = _add_edge_common(n1, n2, weight)

@@ -154,60 +227,188 @@ def add_edge(
     increment_edge_version(graph)


-class
+class NodeProtocol(Protocol):
     """Minimal protocol for TNFR nodes."""

-    EPI:
-    vf:
-    theta:
-    Si:
+    EPI: EPIValue
+    vf: StructuralFrequency
+    theta: Phase
+    Si: SenseIndex
     epi_kind: str
-    dnfr:
-    d2EPI:
-    graph:
+    dnfr: DeltaNFR
+    d2EPI: SecondDerivativeEPI
+    graph: MutableMapping[str, Any]
+
+    def neighbors(self) -> Iterable[NodeProtocol | Hashable]:
+        """Iterate structural neighbours coupled to this node."""
+
+        ...

-    def
+    def _glyph_storage(self) -> MutableMapping[str, object]:
+        """Return the mutable mapping storing glyph metadata."""

-
+        ...

-    def has_edge(self, other: "
+    def has_edge(self, other: "NodeProtocol") -> bool:
+        """Return ``True`` when an edge connects this node to ``other``."""
+
+        ...

     def add_edge(
-        self,
-
+        self,
+        other: NodeProtocol,
+        weight: CouplingWeight,
+        *,
+        overwrite: bool = False,
+    ) -> None:
+        """Couple ``other`` using ``weight`` optionally replacing existing links."""
+
+        ...
+
+    def offset(self) -> int:
+        """Return the node offset index within the canonical ordering."""
+
+        ...

-    def
+    def all_nodes(self) -> Iterable[NodeProtocol]:
+        """Iterate all nodes of the attached graph as :class:`NodeProtocol` objects."""

-
+        ...


-class
+class NodeNX(NodeProtocol):
     """Adapter for ``networkx`` nodes."""

-    # Statically defined property descriptors for ``
+    # Statically defined property descriptors for ``NodeNX`` attributes.
     # Declaring them here makes the attributes discoverable by type checkers
     # and IDEs, avoiding the previous runtime ``setattr`` loop.
-    EPI:
-    vf:
-    theta:
-    Si:
-    epi_kind: str =
-    dnfr:
-    d2EPI:
-
-
-
-
-
+    EPI: EPIValue = ATTR_SPECS["EPI"].build_property()
+    vf: StructuralFrequency = ATTR_SPECS["vf"].build_property()
+    theta: Phase = ATTR_SPECS["theta"].build_property()
+    Si: SenseIndex = ATTR_SPECS["Si"].build_property()
+    epi_kind: str = ATTR_SPECS["epi_kind"].build_property()
+    dnfr: DeltaNFR = ATTR_SPECS["dnfr"].build_property()
+    d2EPI: SecondDerivativeEPI = ATTR_SPECS["d2EPI"].build_property()
+
+    @staticmethod
+    def _prepare_coherence_operator(
+        operator: CoherenceOperator | None,
+        *,
+        dim: int | None = None,
+        spectrum: Sequence[float] | np.ndarray | None = None,
+        c_min: float | None = None,
+    ) -> CoherenceOperator | None:
+        if operator is not None:
+            return operator
+
+        spectrum_array: np.ndarray | None
+        if spectrum is None:
+            spectrum_array = None
+        else:
+            spectrum_array = np.asarray(spectrum, dtype=np.complex128)
+            if spectrum_array.ndim != 1:
+                raise ValueError("Coherence spectrum must be one-dimensional.")
+
+        effective_dim = dim
+        if spectrum_array is not None:
+            spectrum_length = spectrum_array.shape[0]
+            if effective_dim is None:
+                effective_dim = int(spectrum_length)
+            elif spectrum_length != int(effective_dim):
+                raise ValueError("Coherence spectrum size mismatch with requested dimension.")
+
+        if effective_dim is None:
+            return None
+
+        kwargs: dict[str, Any] = {}
+        if spectrum_array is not None:
+            kwargs["spectrum"] = spectrum_array
+        if c_min is not None:
+            kwargs["c_min"] = float(c_min)
+        return make_coherence_operator(int(effective_dim), **kwargs)
+
+    @staticmethod
+    def _prepare_frequency_operator(
+        operator: FrequencyOperator | None,
+        *,
+        matrix: Sequence[Sequence[complex]] | np.ndarray | None = None,
+    ) -> FrequencyOperator | None:
+        if operator is not None:
+            return operator
+        if matrix is None:
+            return None
+        return make_frequency_operator(np.asarray(matrix, dtype=np.complex128))
+
+    def __init__(
+        self,
+        G: TNFRGraph,
+        n: NodeId,
+        *,
+        state_projector: StateProjector | None = None,
+        enable_math_validation: Optional[bool] = None,
+        hilbert_space: HilbertSpace | None = None,
+        coherence_operator: CoherenceOperator | None = None,
+        coherence_dim: int | None = None,
+        coherence_spectrum: Sequence[float] | np.ndarray | None = None,
+        coherence_c_min: float | None = None,
+        frequency_operator: FrequencyOperator | None = None,
+        frequency_matrix: Sequence[Sequence[complex]] | np.ndarray | None = None,
+        coherence_threshold: float | None = None,
+        validator: NFRValidator | None = None,
+        rng: np.random.Generator | None = None,
+    ) -> None:
+        self.G: TNFRGraph = G
+        self.n: NodeId = n
+        self.graph: MutableMapping[str, Any] = G.graph
+        self.state_projector: StateProjector = state_projector or BasicStateProjector()
+        self._math_validation_override: Optional[bool] = enable_math_validation
+        if enable_math_validation is None:
+            effective_validation = get_flags().enable_math_validation
+        else:
+            effective_validation = bool(enable_math_validation)
+        self.enable_math_validation: bool = effective_validation
+        default_dimension = (
+            G.number_of_nodes() if hasattr(G, "number_of_nodes") else len(tuple(G.nodes))
+        )
+        default_dimension = max(1, int(default_dimension))
+        self.hilbert_space: HilbertSpace = hilbert_space or HilbertSpace(default_dimension)
+        if coherence_operator is not None and (
+            coherence_dim is not None
+            or coherence_spectrum is not None
+            or coherence_c_min is not None
+        ):
+            raise ValueError(
+                "Provide either a coherence operator or factory parameters, not both."
+            )
+        if frequency_operator is not None and frequency_matrix is not None:
+            raise ValueError(
+                "Provide either a frequency operator or frequency matrix, not both."
+            )
+
+        self.coherence_operator: CoherenceOperator | None = self._prepare_coherence_operator(
+            coherence_operator,
+            dim=coherence_dim,
+            spectrum=coherence_spectrum,
+            c_min=coherence_c_min,
+        )
+        self.frequency_operator: FrequencyOperator | None = self._prepare_frequency_operator(
+            frequency_operator,
+            matrix=frequency_matrix,
+        )
+        self.coherence_threshold: float | None = (
+            float(coherence_threshold) if coherence_threshold is not None else None
+        )
+        self.validator: NFRValidator | None = validator
+        self.rng: np.random.Generator | None = rng
         G.graph.setdefault("_node_cache", {})[n] = self

-    def _glyph_storage(self):
+    def _glyph_storage(self) -> MutableMapping[str, Any]:
         return self.G.nodes[self.n]

     @classmethod
-    def from_graph(cls, G, n):
-        """Return cached ``
-        lock = get_lock(f"
+    def from_graph(cls, G: TNFRGraph, n: NodeId) -> "NodeNX":
+        """Return cached ``NodeNX`` for ``(G, n)`` with thread safety."""
+        lock = get_lock(f"node_nx_cache_{id(G)}")
         with lock:
             cache = G.graph.setdefault("_node_cache", {})
             node = cache.get(n)

@@ -215,23 +416,31 @@ class NodoNX(NodoProtocol):
                 node = cls(G, n)
             return node

-    def neighbors(self) -> Iterable[
+    def neighbors(self) -> Iterable[NodeId]:
         """Iterate neighbour identifiers (IDs).

         Wrap each resulting ID with :meth:`from_graph` to obtain the cached
-        ``
+        ``NodeNX`` instance when actual node objects are required.
         """
         return self.G.neighbors(self.n)

-    def has_edge(self, other:
-
+    def has_edge(self, other: NodeProtocol) -> bool:
+        """Return ``True`` when an edge connects this node to ``other``."""
+
+        if isinstance(other, NodeNX):
             return self.G.has_edge(self.n, other.n)
         raise NotImplementedError

     def add_edge(
-        self,
+        self,
+        other: NodeProtocol,
+        weight: CouplingWeight,
+        *,
+        overwrite: bool = False,
     ) -> None:
-
+        """Couple ``other`` using ``weight`` optionally replacing existing links."""
+
+        if isinstance(other, NodeNX):
             add_edge(
                 self.G,
                 self.n,

@@ -243,15 +452,212 @@ class NodoNX(NodoProtocol):
         raise NotImplementedError

     def offset(self) -> int:
+        """Return the cached node offset within the canonical ordering."""
+
         mapping = ensure_node_offset_map(self.G)
         return mapping.get(self.n, 0)

-    def all_nodes(self) -> Iterable[
+    def all_nodes(self) -> Iterable[NodeProtocol]:
+        """Iterate all nodes of ``self.G`` as ``NodeNX`` adapters."""
+
         override = self.graph.get("_all_nodes")
         if override is not None:
             return override

         nodes = cached_node_list(self.G)
-        return tuple(
-
-
+        return tuple(NodeNX.from_graph(self.G, v) for v in nodes)
+
+    def run_sequence_with_validation(
+        self,
+        ops: Iterable[Callable[[TNFRGraph, NodeId], None]],
+        *,
+        projector: StateProjector | None = None,
+        hilbert_space: HilbertSpace | None = None,
+        coherence_operator: CoherenceOperator | None = None,
+        coherence_dim: int | None = None,
+        coherence_spectrum: Sequence[float] | np.ndarray | None = None,
+        coherence_c_min: float | None = None,
+        coherence_threshold: float | None = None,
+        frequency_operator: FrequencyOperator | None = None,
+        frequency_matrix: Sequence[Sequence[complex]] | np.ndarray | None = None,
+        validator: NFRValidator | None = None,
+        enforce_frequency_positivity: bool | None = None,
+        enable_validation: bool | None = None,
+        rng: np.random.Generator | None = None,
+        log_metrics: bool = False,
+    ) -> dict[str, Any]:
+        """Run ``ops`` then return pre/post metrics with optional validation."""
+
+        from .structural import run_sequence as structural_run_sequence
+
+        projector = projector or self.state_projector
+        hilbert = hilbert_space or self.hilbert_space
+
+        effective_coherence = (
+            self._prepare_coherence_operator(
+                coherence_operator,
+                dim=coherence_dim,
+                spectrum=coherence_spectrum,
+                c_min=(
+                    coherence_c_min
+                    if coherence_c_min is not None
+                    else (
+                        self.coherence_operator.c_min
+                        if self.coherence_operator is not None
+                        else None
+                    )
+                ),
+            )
+            if any(
+                parameter is not None
+                for parameter in (
+                    coherence_operator,
+                    coherence_dim,
+                    coherence_spectrum,
+                    coherence_c_min,
+                )
+            )
+            else self.coherence_operator
+        )
+        effective_freq = (
+            self._prepare_frequency_operator(
+                frequency_operator,
+                matrix=frequency_matrix,
+            )
+            if frequency_operator is not None or frequency_matrix is not None
+            else self.frequency_operator
+        )
+        threshold = (
+            float(coherence_threshold)
+            if coherence_threshold is not None
+            else self.coherence_threshold
+        )
+        validator = validator or self.validator
+        rng = rng or self.rng
+
+        if enable_validation is None:
+            if self._math_validation_override is not None:
+                should_validate = bool(self._math_validation_override)
+            else:
+                should_validate = bool(get_flags().enable_math_validation)
+        else:
+            should_validate = bool(enable_validation)
+        self.enable_math_validation = should_validate
+
+        enforce_frequency = (
+            bool(enforce_frequency_positivity)
+            if enforce_frequency_positivity is not None
+            else bool(effective_freq is not None)
+        )
+
+        def _project(epi: float, vf: float, theta: float) -> np.ndarray:
+            local_rng = None
+            if rng is not None:
+                bit_generator = rng.bit_generator
+                cloned_state = copy.deepcopy(bit_generator.state)
+                local_bit_generator = type(bit_generator)()
+                local_bit_generator.state = cloned_state
+                local_rng = np.random.Generator(local_bit_generator)
+            vector = projector(
+                epi=epi,
+                nu_f=vf,
+                theta=theta,
+                dim=hilbert.dimension,
+                rng=local_rng,
+            )
+            return np.asarray(vector, dtype=np.complex128)
+
+        active_flags = get_flags()
+        should_log_metrics = bool(log_metrics and active_flags.log_performance)
+
+        def _metrics(state: np.ndarray, label: str) -> dict[str, Any]:
+            metrics: dict[str, Any] = {}
+            with context_flags(log_performance=False):
+                norm_passed, norm_value = runtime_normalized(state, hilbert, label=label)
+            metrics["normalized"] = bool(norm_passed)
+            metrics["norm"] = float(norm_value)
+            if effective_coherence is not None and threshold is not None:
+                coh_passed, coh_value = runtime_coherence(
+                    state, effective_coherence, threshold, label=label
+                )
+                metrics["coherence"] = bool(coh_passed)
+                metrics["coherence_expectation"] = float(coh_value)
+                metrics["coherence_threshold"] = float(threshold)
+            if effective_freq is not None:
+                freq_summary = runtime_frequency_positive(
+                    state,
+                    effective_freq,
+                    enforce=enforce_frequency,
+                    label=label,
+                )
+                metrics["frequency_positive"] = bool(freq_summary["passed"])
+                metrics["frequency_expectation"] = float(freq_summary["value"])
+                metrics["frequency_projection_passed"] = bool(
+                    freq_summary["projection_passed"]
+                )
+                metrics["frequency_spectrum_psd"] = bool(freq_summary["spectrum_psd"])
+                metrics["frequency_spectrum_min"] = float(freq_summary["spectrum_min"])
+                metrics["frequency_enforced"] = bool(freq_summary["enforce"])
+            if effective_coherence is not None:
+                unitary_passed, unitary_norm = runtime_stable_unitary(
+                    state,
+                    effective_coherence,
+                    hilbert,
+                    label=label,
+                )
+                metrics["stable_unitary"] = bool(unitary_passed)
+                metrics["stable_unitary_norm_after"] = float(unitary_norm)
+            if should_log_metrics:
+                LOGGER.debug(
+                    "node_metrics.%s normalized=%s coherence=%s frequency_positive=%s stable_unitary=%s coherence_expectation=%s frequency_expectation=%s",
+                    label,
+                    metrics.get("normalized"),
+                    metrics.get("coherence"),
+                    metrics.get("frequency_positive"),
+                    metrics.get("stable_unitary"),
+                    metrics.get("coherence_expectation"),
+                    metrics.get("frequency_expectation"),
+                )
+            return metrics
+
+        pre_state = _project(self.EPI, self.vf, self.theta)
+        pre_metrics = _metrics(pre_state, "pre")
+
+        structural_run_sequence(self.G, self.n, ops)
+
+        post_state = _project(self.EPI, self.vf, self.theta)
+        post_metrics = _metrics(post_state, "post")
+
+        validation_summary: dict[str, Any] | None = None
+        if should_validate:
+            validator_instance = validator
+            if validator_instance is None:
+                if effective_coherence is None:
+                    raise ValueError("Validation requires a coherence operator.")
+                validator_instance = NFRValidator(
+                    hilbert,
+                    effective_coherence,
+                    threshold if threshold is not None else 0.0,
+                    frequency_operator=effective_freq,
+                )
+            outcome = validator_instance.validate(
+                post_state,
+                enforce_frequency_positivity=enforce_frequency,
+            )
+            validation_summary = {
+                "passed": bool(outcome.passed),
+                "summary": outcome.summary,
+                "report": validator_instance.report(outcome),
+            }
+
+        result = {
+            "pre_state": pre_state,
+            "post_state": post_state,
+            "pre_metrics": pre_metrics,
+            "post_metrics": post_metrics,
+            "validation": validation_summary,
+        }
+        # Preserve legacy structure for downstream compatibility.
+        result["pre"] = {"state": pre_state, "metrics": pre_metrics}
+        result["post"] = {"state": post_state, "metrics": post_metrics}
+        return result