tnfr 3.0.3__py3-none-any.whl → 8.5.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
- tnfr/__init__.py +375 -56
- tnfr/__init__.pyi +33 -0
- tnfr/_compat.py +10 -0
- tnfr/_generated_version.py +34 -0
- tnfr/_version.py +49 -0
- tnfr/_version.pyi +7 -0
- tnfr/alias.py +723 -0
- tnfr/alias.pyi +108 -0
- tnfr/backends/__init__.py +354 -0
- tnfr/backends/jax_backend.py +173 -0
- tnfr/backends/numpy_backend.py +238 -0
- tnfr/backends/optimized_numpy.py +420 -0
- tnfr/backends/torch_backend.py +408 -0
- tnfr/cache.py +171 -0
- tnfr/cache.pyi +13 -0
- tnfr/cli/__init__.py +110 -0
- tnfr/cli/__init__.pyi +26 -0
- tnfr/cli/arguments.py +489 -0
- tnfr/cli/arguments.pyi +29 -0
- tnfr/cli/execution.py +914 -0
- tnfr/cli/execution.pyi +70 -0
- tnfr/cli/interactive_validator.py +614 -0
- tnfr/cli/utils.py +51 -0
- tnfr/cli/utils.pyi +7 -0
- tnfr/cli/validate.py +236 -0
- tnfr/compat/__init__.py +85 -0
- tnfr/compat/dataclass.py +136 -0
- tnfr/compat/jsonschema_stub.py +61 -0
- tnfr/compat/matplotlib_stub.py +73 -0
- tnfr/compat/numpy_stub.py +155 -0
- tnfr/config/__init__.py +224 -0
- tnfr/config/__init__.pyi +10 -0
- tnfr/config/constants.py +104 -0
- tnfr/config/constants.pyi +12 -0
- tnfr/config/defaults.py +54 -0
- tnfr/config/defaults_core.py +212 -0
- tnfr/config/defaults_init.py +33 -0
- tnfr/config/defaults_metric.py +104 -0
- tnfr/config/feature_flags.py +81 -0
- tnfr/config/feature_flags.pyi +16 -0
- tnfr/config/glyph_constants.py +31 -0
- tnfr/config/init.py +77 -0
- tnfr/config/init.pyi +8 -0
- tnfr/config/operator_names.py +254 -0
- tnfr/config/operator_names.pyi +36 -0
- tnfr/config/physics_derivation.py +354 -0
- tnfr/config/presets.py +83 -0
- tnfr/config/presets.pyi +7 -0
- tnfr/config/security.py +927 -0
- tnfr/config/thresholds.py +114 -0
- tnfr/config/tnfr_config.py +498 -0
- tnfr/constants/__init__.py +92 -0
- tnfr/constants/__init__.pyi +92 -0
- tnfr/constants/aliases.py +33 -0
- tnfr/constants/aliases.pyi +27 -0
- tnfr/constants/init.py +33 -0
- tnfr/constants/init.pyi +12 -0
- tnfr/constants/metric.py +104 -0
- tnfr/constants/metric.pyi +19 -0
- tnfr/core/__init__.py +33 -0
- tnfr/core/container.py +226 -0
- tnfr/core/default_implementations.py +329 -0
- tnfr/core/interfaces.py +279 -0
- tnfr/dynamics/__init__.py +238 -0
- tnfr/dynamics/__init__.pyi +83 -0
- tnfr/dynamics/adaptation.py +267 -0
- tnfr/dynamics/adaptation.pyi +7 -0
- tnfr/dynamics/adaptive_sequences.py +189 -0
- tnfr/dynamics/adaptive_sequences.pyi +14 -0
- tnfr/dynamics/aliases.py +23 -0
- tnfr/dynamics/aliases.pyi +19 -0
- tnfr/dynamics/bifurcation.py +232 -0
- tnfr/dynamics/canonical.py +229 -0
- tnfr/dynamics/canonical.pyi +48 -0
- tnfr/dynamics/coordination.py +385 -0
- tnfr/dynamics/coordination.pyi +25 -0
- tnfr/dynamics/dnfr.py +3034 -0
- tnfr/dynamics/dnfr.pyi +26 -0
- tnfr/dynamics/dynamic_limits.py +225 -0
- tnfr/dynamics/feedback.py +252 -0
- tnfr/dynamics/feedback.pyi +24 -0
- tnfr/dynamics/fused_dnfr.py +454 -0
- tnfr/dynamics/homeostasis.py +157 -0
- tnfr/dynamics/homeostasis.pyi +14 -0
- tnfr/dynamics/integrators.py +661 -0
- tnfr/dynamics/integrators.pyi +36 -0
- tnfr/dynamics/learning.py +310 -0
- tnfr/dynamics/learning.pyi +33 -0
- tnfr/dynamics/metabolism.py +254 -0
- tnfr/dynamics/nbody.py +796 -0
- tnfr/dynamics/nbody_tnfr.py +783 -0
- tnfr/dynamics/propagation.py +326 -0
- tnfr/dynamics/runtime.py +908 -0
- tnfr/dynamics/runtime.pyi +77 -0
- tnfr/dynamics/sampling.py +36 -0
- tnfr/dynamics/sampling.pyi +7 -0
- tnfr/dynamics/selectors.py +711 -0
- tnfr/dynamics/selectors.pyi +85 -0
- tnfr/dynamics/structural_clip.py +207 -0
- tnfr/errors/__init__.py +37 -0
- tnfr/errors/contextual.py +492 -0
- tnfr/execution.py +223 -0
- tnfr/execution.pyi +45 -0
- tnfr/extensions/__init__.py +205 -0
- tnfr/extensions/__init__.pyi +18 -0
- tnfr/extensions/base.py +173 -0
- tnfr/extensions/base.pyi +35 -0
- tnfr/extensions/business/__init__.py +71 -0
- tnfr/extensions/business/__init__.pyi +11 -0
- tnfr/extensions/business/cookbook.py +88 -0
- tnfr/extensions/business/cookbook.pyi +8 -0
- tnfr/extensions/business/health_analyzers.py +202 -0
- tnfr/extensions/business/health_analyzers.pyi +9 -0
- tnfr/extensions/business/patterns.py +183 -0
- tnfr/extensions/business/patterns.pyi +8 -0
- tnfr/extensions/medical/__init__.py +73 -0
- tnfr/extensions/medical/__init__.pyi +11 -0
- tnfr/extensions/medical/cookbook.py +88 -0
- tnfr/extensions/medical/cookbook.pyi +8 -0
- tnfr/extensions/medical/health_analyzers.py +181 -0
- tnfr/extensions/medical/health_analyzers.pyi +9 -0
- tnfr/extensions/medical/patterns.py +163 -0
- tnfr/extensions/medical/patterns.pyi +8 -0
- tnfr/flatten.py +262 -0
- tnfr/flatten.pyi +21 -0
- tnfr/gamma.py +354 -0
- tnfr/gamma.pyi +36 -0
- tnfr/glyph_history.py +377 -0
- tnfr/glyph_history.pyi +35 -0
- tnfr/glyph_runtime.py +19 -0
- tnfr/glyph_runtime.pyi +8 -0
- tnfr/immutable.py +218 -0
- tnfr/immutable.pyi +36 -0
- tnfr/initialization.py +203 -0
- tnfr/initialization.pyi +65 -0
- tnfr/io.py +10 -0
- tnfr/io.pyi +13 -0
- tnfr/locking.py +37 -0
- tnfr/locking.pyi +7 -0
- tnfr/mathematics/__init__.py +79 -0
- tnfr/mathematics/backend.py +453 -0
- tnfr/mathematics/backend.pyi +99 -0
- tnfr/mathematics/dynamics.py +408 -0
- tnfr/mathematics/dynamics.pyi +90 -0
- tnfr/mathematics/epi.py +391 -0
- tnfr/mathematics/epi.pyi +65 -0
- tnfr/mathematics/generators.py +242 -0
- tnfr/mathematics/generators.pyi +29 -0
- tnfr/mathematics/metrics.py +119 -0
- tnfr/mathematics/metrics.pyi +16 -0
- tnfr/mathematics/operators.py +239 -0
- tnfr/mathematics/operators.pyi +59 -0
- tnfr/mathematics/operators_factory.py +124 -0
- tnfr/mathematics/operators_factory.pyi +11 -0
- tnfr/mathematics/projection.py +87 -0
- tnfr/mathematics/projection.pyi +33 -0
- tnfr/mathematics/runtime.py +182 -0
- tnfr/mathematics/runtime.pyi +64 -0
- tnfr/mathematics/spaces.py +256 -0
- tnfr/mathematics/spaces.pyi +83 -0
- tnfr/mathematics/transforms.py +305 -0
- tnfr/mathematics/transforms.pyi +62 -0
- tnfr/metrics/__init__.py +79 -0
- tnfr/metrics/__init__.pyi +20 -0
- tnfr/metrics/buffer_cache.py +163 -0
- tnfr/metrics/buffer_cache.pyi +24 -0
- tnfr/metrics/cache_utils.py +214 -0
- tnfr/metrics/coherence.py +2009 -0
- tnfr/metrics/coherence.pyi +129 -0
- tnfr/metrics/common.py +158 -0
- tnfr/metrics/common.pyi +35 -0
- tnfr/metrics/core.py +316 -0
- tnfr/metrics/core.pyi +13 -0
- tnfr/metrics/diagnosis.py +833 -0
- tnfr/metrics/diagnosis.pyi +86 -0
- tnfr/metrics/emergence.py +245 -0
- tnfr/metrics/export.py +179 -0
- tnfr/metrics/export.pyi +7 -0
- tnfr/metrics/glyph_timing.py +379 -0
- tnfr/metrics/glyph_timing.pyi +81 -0
- tnfr/metrics/learning_metrics.py +280 -0
- tnfr/metrics/learning_metrics.pyi +21 -0
- tnfr/metrics/phase_coherence.py +351 -0
- tnfr/metrics/phase_compatibility.py +349 -0
- tnfr/metrics/reporting.py +183 -0
- tnfr/metrics/reporting.pyi +25 -0
- tnfr/metrics/sense_index.py +1203 -0
- tnfr/metrics/sense_index.pyi +9 -0
- tnfr/metrics/trig.py +373 -0
- tnfr/metrics/trig.pyi +13 -0
- tnfr/metrics/trig_cache.py +233 -0
- tnfr/metrics/trig_cache.pyi +10 -0
- tnfr/multiscale/__init__.py +32 -0
- tnfr/multiscale/hierarchical.py +517 -0
- tnfr/node.py +763 -0
- tnfr/node.pyi +139 -0
- tnfr/observers.py +255 -130
- tnfr/observers.pyi +31 -0
- tnfr/ontosim.py +144 -137
- tnfr/ontosim.pyi +28 -0
- tnfr/operators/__init__.py +1672 -0
- tnfr/operators/__init__.pyi +31 -0
- tnfr/operators/algebra.py +277 -0
- tnfr/operators/canonical_patterns.py +420 -0
- tnfr/operators/cascade.py +267 -0
- tnfr/operators/cycle_detection.py +358 -0
- tnfr/operators/definitions.py +4108 -0
- tnfr/operators/definitions.pyi +78 -0
- tnfr/operators/grammar.py +1164 -0
- tnfr/operators/grammar.pyi +140 -0
- tnfr/operators/hamiltonian.py +710 -0
- tnfr/operators/health_analyzer.py +809 -0
- tnfr/operators/jitter.py +272 -0
- tnfr/operators/jitter.pyi +11 -0
- tnfr/operators/lifecycle.py +314 -0
- tnfr/operators/metabolism.py +618 -0
- tnfr/operators/metrics.py +2138 -0
- tnfr/operators/network_analysis/__init__.py +27 -0
- tnfr/operators/network_analysis/source_detection.py +186 -0
- tnfr/operators/nodal_equation.py +395 -0
- tnfr/operators/pattern_detection.py +660 -0
- tnfr/operators/patterns.py +669 -0
- tnfr/operators/postconditions/__init__.py +38 -0
- tnfr/operators/postconditions/mutation.py +236 -0
- tnfr/operators/preconditions/__init__.py +1226 -0
- tnfr/operators/preconditions/coherence.py +305 -0
- tnfr/operators/preconditions/dissonance.py +236 -0
- tnfr/operators/preconditions/emission.py +128 -0
- tnfr/operators/preconditions/mutation.py +580 -0
- tnfr/operators/preconditions/reception.py +125 -0
- tnfr/operators/preconditions/resonance.py +364 -0
- tnfr/operators/registry.py +74 -0
- tnfr/operators/registry.pyi +9 -0
- tnfr/operators/remesh.py +1809 -0
- tnfr/operators/remesh.pyi +26 -0
- tnfr/operators/structural_units.py +268 -0
- tnfr/operators/unified_grammar.py +105 -0
- tnfr/parallel/__init__.py +54 -0
- tnfr/parallel/auto_scaler.py +234 -0
- tnfr/parallel/distributed.py +384 -0
- tnfr/parallel/engine.py +238 -0
- tnfr/parallel/gpu_engine.py +420 -0
- tnfr/parallel/monitoring.py +248 -0
- tnfr/parallel/partitioner.py +459 -0
- tnfr/py.typed +0 -0
- tnfr/recipes/__init__.py +22 -0
- tnfr/recipes/cookbook.py +743 -0
- tnfr/rng.py +178 -0
- tnfr/rng.pyi +26 -0
- tnfr/schemas/__init__.py +8 -0
- tnfr/schemas/grammar.json +94 -0
- tnfr/sdk/__init__.py +107 -0
- tnfr/sdk/__init__.pyi +19 -0
- tnfr/sdk/adaptive_system.py +173 -0
- tnfr/sdk/adaptive_system.pyi +21 -0
- tnfr/sdk/builders.py +370 -0
- tnfr/sdk/builders.pyi +51 -0
- tnfr/sdk/fluent.py +1121 -0
- tnfr/sdk/fluent.pyi +74 -0
- tnfr/sdk/templates.py +342 -0
- tnfr/sdk/templates.pyi +41 -0
- tnfr/sdk/utils.py +341 -0
- tnfr/secure_config.py +46 -0
- tnfr/security/__init__.py +70 -0
- tnfr/security/database.py +514 -0
- tnfr/security/subprocess.py +503 -0
- tnfr/security/validation.py +290 -0
- tnfr/selector.py +247 -0
- tnfr/selector.pyi +19 -0
- tnfr/sense.py +378 -0
- tnfr/sense.pyi +23 -0
- tnfr/services/__init__.py +17 -0
- tnfr/services/orchestrator.py +325 -0
- tnfr/sparse/__init__.py +39 -0
- tnfr/sparse/representations.py +492 -0
- tnfr/structural.py +705 -0
- tnfr/structural.pyi +83 -0
- tnfr/telemetry/__init__.py +35 -0
- tnfr/telemetry/cache_metrics.py +226 -0
- tnfr/telemetry/cache_metrics.pyi +64 -0
- tnfr/telemetry/nu_f.py +422 -0
- tnfr/telemetry/nu_f.pyi +108 -0
- tnfr/telemetry/verbosity.py +36 -0
- tnfr/telemetry/verbosity.pyi +15 -0
- tnfr/tokens.py +58 -0
- tnfr/tokens.pyi +36 -0
- tnfr/tools/__init__.py +20 -0
- tnfr/tools/domain_templates.py +478 -0
- tnfr/tools/sequence_generator.py +846 -0
- tnfr/topology/__init__.py +13 -0
- tnfr/topology/asymmetry.py +151 -0
- tnfr/trace.py +543 -0
- tnfr/trace.pyi +42 -0
- tnfr/tutorials/__init__.py +38 -0
- tnfr/tutorials/autonomous_evolution.py +285 -0
- tnfr/tutorials/interactive.py +1576 -0
- tnfr/tutorials/structural_metabolism.py +238 -0
- tnfr/types.py +775 -0
- tnfr/types.pyi +357 -0
- tnfr/units.py +68 -0
- tnfr/units.pyi +13 -0
- tnfr/utils/__init__.py +282 -0
- tnfr/utils/__init__.pyi +215 -0
- tnfr/utils/cache.py +4223 -0
- tnfr/utils/cache.pyi +470 -0
- tnfr/utils/callbacks.py +375 -0
- tnfr/utils/callbacks.pyi +49 -0
- tnfr/utils/chunks.py +108 -0
- tnfr/utils/chunks.pyi +22 -0
- tnfr/utils/data.py +428 -0
- tnfr/utils/data.pyi +74 -0
- tnfr/utils/graph.py +85 -0
- tnfr/utils/graph.pyi +10 -0
- tnfr/utils/init.py +821 -0
- tnfr/utils/init.pyi +80 -0
- tnfr/utils/io.py +559 -0
- tnfr/utils/io.pyi +66 -0
- tnfr/utils/numeric.py +114 -0
- tnfr/utils/numeric.pyi +21 -0
- tnfr/validation/__init__.py +257 -0
- tnfr/validation/__init__.pyi +85 -0
- tnfr/validation/compatibility.py +460 -0
- tnfr/validation/compatibility.pyi +6 -0
- tnfr/validation/config.py +73 -0
- tnfr/validation/graph.py +139 -0
- tnfr/validation/graph.pyi +18 -0
- tnfr/validation/input_validation.py +755 -0
- tnfr/validation/invariants.py +712 -0
- tnfr/validation/rules.py +253 -0
- tnfr/validation/rules.pyi +44 -0
- tnfr/validation/runtime.py +279 -0
- tnfr/validation/runtime.pyi +28 -0
- tnfr/validation/sequence_validator.py +162 -0
- tnfr/validation/soft_filters.py +170 -0
- tnfr/validation/soft_filters.pyi +32 -0
- tnfr/validation/spectral.py +164 -0
- tnfr/validation/spectral.pyi +42 -0
- tnfr/validation/validator.py +1266 -0
- tnfr/validation/window.py +39 -0
- tnfr/validation/window.pyi +1 -0
- tnfr/visualization/__init__.py +98 -0
- tnfr/visualization/cascade_viz.py +256 -0
- tnfr/visualization/hierarchy.py +284 -0
- tnfr/visualization/sequence_plotter.py +784 -0
- tnfr/viz/__init__.py +60 -0
- tnfr/viz/matplotlib.py +278 -0
- tnfr/viz/matplotlib.pyi +35 -0
- tnfr-8.5.0.dist-info/METADATA +573 -0
- tnfr-8.5.0.dist-info/RECORD +353 -0
- tnfr-8.5.0.dist-info/entry_points.txt +3 -0
- tnfr-3.0.3.dist-info/licenses/LICENSE.txt → tnfr-8.5.0.dist-info/licenses/LICENSE.md +1 -1
- tnfr/constants.py +0 -183
- tnfr/dynamics.py +0 -543
- tnfr/helpers.py +0 -198
- tnfr/main.py +0 -37
- tnfr/operators.py +0 -296
- tnfr-3.0.3.dist-info/METADATA +0 -35
- tnfr-3.0.3.dist-info/RECORD +0 -13
- {tnfr-3.0.3.dist-info → tnfr-8.5.0.dist-info}/WHEEL +0 -0
- {tnfr-3.0.3.dist-info → tnfr-8.5.0.dist-info}/top_level.txt +0 -0
tnfr/node.py
ADDED
@@ -0,0 +1,763 @@
"""Node utilities and structures for TNFR graphs."""

from __future__ import annotations

import copy
import math
from collections.abc import Hashable
from dataclasses import dataclass
from typing import (
    Any,
    Callable,
    Iterable,
    Mapping,
    MutableMapping,
    Optional,
    Protocol,
    Sequence,
    SupportsFloat,
    TypeVar,
)
from weakref import WeakValueDictionary

import numpy as np

from .alias import (
    get_attr,
    get_attr_str,
    get_theta_attr,
    set_attr,
    set_attr_str,
    set_attr_generic,
    set_dnfr,
    set_theta,
    set_vf,
)
from .config import context_flags, get_flags
from .constants.aliases import (
    ALIAS_D2EPI,
    ALIAS_DNFR,
    ALIAS_EPI,
    ALIAS_EPI_KIND,
    ALIAS_SI,
    ALIAS_THETA,
    ALIAS_VF,
)
from .mathematics import (
    BasicStateProjector,
    CoherenceOperator,
    FrequencyOperator,
    HilbertSpace,
    StateProjector,
)
from .validation import NFRValidator
from .mathematics.operators_factory import (
    make_coherence_operator,
    make_frequency_operator,
)
from .mathematics.runtime import (
    coherence as runtime_coherence,
    frequency_positive as runtime_frequency_positive,
    normalized as runtime_normalized,
    stable_unitary as runtime_stable_unitary,
)
from .locking import get_lock
from .types import (
    CouplingWeight,
    DeltaNFR,
    EPIValue,
    NodeId,
    Phase,
    SecondDerivativeEPI,
    SenseIndex,
    StructuralFrequency,
    TNFRGraph,
    ZERO_BEPI_STORAGE,
    ensure_bepi,
    serialize_bepi,
)
from .utils import (
    cached_node_list,
    ensure_node_offset_map,
    get_logger,
    increment_edge_version,
    supports_add_edge,
)

T = TypeVar("T")

__all__ = ("NodeNX", "NodeProtocol", "add_edge")

LOGGER = get_logger(__name__)


@dataclass(frozen=True)
class AttrSpec:
    """Configuration required to expose a ``networkx`` node attribute.

    ``AttrSpec`` mirrors the defaults previously used by
    :func:`_nx_attr_property` and centralises the descriptor generation
    logic to keep a single source of truth for NodeNX attribute access.
    """

    aliases: tuple[str, ...]
    default: Any = 0.0
    getter: Callable[[MutableMapping[str, Any], tuple[str, ...], Any], Any] = get_attr
    setter: Callable[..., None] = set_attr
    to_python: Callable[[Any], Any] = float
    to_storage: Callable[[Any], Any] = float
    use_graph_setter: bool = False

    def build_property(self) -> property:
        """Create the property descriptor for ``NodeNX`` attributes."""

        def fget(instance: "NodeNX") -> T:
            return self.to_python(
                self.getter(instance.G.nodes[instance.n], self.aliases, self.default)
            )

        def fset(instance: "NodeNX", value: T) -> None:
            value = self.to_storage(value)
            if self.use_graph_setter:
                self.setter(instance.G, instance.n, value)
            else:
                self.setter(instance.G.nodes[instance.n], self.aliases, value)

        return property(fget, fset)


# Canonical adapters for BEPI storage ------------------------------------


def _epi_to_python(value: Any) -> EPIValue:
    if value is None:
        raise ValueError("EPI attribute is required for BEPI nodes")
    return ensure_bepi(value)


def _epi_to_storage(
    value: Any,
) -> Mapping[str, tuple[complex, ...] | tuple[float, ...]]:
    return serialize_bepi(value)


def _get_bepi_attr(
    mapping: Mapping[str, Any], aliases: tuple[str, ...], default: Any
) -> Any:
    return get_attr(mapping, aliases, default, conv=lambda obj: obj)


def _set_bepi_attr(
    mapping: MutableMapping[str, Any], aliases: tuple[str, ...], value: Any
) -> Mapping[str, tuple[complex, ...] | tuple[float, ...]]:
    return set_attr_generic(mapping, aliases, value, conv=lambda obj: obj)


# Mapping of NodeNX attribute specifications used to generate property
# descriptors. Each entry defines the keyword arguments passed to
# ``AttrSpec.build_property`` for a given attribute name.
ATTR_SPECS: dict[str, AttrSpec] = {
    "EPI": AttrSpec(
        aliases=ALIAS_EPI,
        default=ZERO_BEPI_STORAGE,
        getter=_get_bepi_attr,
        to_python=_epi_to_python,
        to_storage=_epi_to_storage,
        setter=_set_bepi_attr,
    ),
    "vf": AttrSpec(aliases=ALIAS_VF, setter=set_vf, use_graph_setter=True),
    "theta": AttrSpec(
        aliases=ALIAS_THETA,
        getter=lambda mapping, _aliases, default: get_theta_attr(mapping, default),
        setter=set_theta,
        use_graph_setter=True,
    ),
    "Si": AttrSpec(aliases=ALIAS_SI),
    "epi_kind": AttrSpec(
        aliases=ALIAS_EPI_KIND,
        default="",
        getter=get_attr_str,
        setter=set_attr_str,
        to_python=str,
        to_storage=str,
    ),
    "dnfr": AttrSpec(aliases=ALIAS_DNFR, setter=set_dnfr, use_graph_setter=True),
    "d2EPI": AttrSpec(aliases=ALIAS_D2EPI),
}


def _add_edge_common(
    n1: NodeId,
    n2: NodeId,
    weight: CouplingWeight | SupportsFloat | str,
) -> Optional[CouplingWeight]:
    """Validate basic edge constraints.

    Returns the parsed weight if the edge can be added. ``None`` is returned
    when the edge should be ignored (e.g. self-connections).
    """

    if n1 == n2:
        return None

    weight = float(weight)
    if not math.isfinite(weight):
        raise ValueError("Edge weight must be a finite number")
    if weight < 0:
        raise ValueError("Edge weight must be non-negative")

    return weight


def add_edge(
    graph: TNFRGraph,
    n1: NodeId,
    n2: NodeId,
    weight: CouplingWeight | SupportsFloat | str,
    overwrite: bool = False,
) -> None:
    """Add an edge between ``n1`` and ``n2`` in a ``networkx`` graph."""

    weight = _add_edge_common(n1, n2, weight)
    if weight is None:
        return

    if not supports_add_edge(graph):
        raise TypeError("add_edge only supports networkx graphs")

    if graph.has_edge(n1, n2) and not overwrite:
        return

    graph.add_edge(n1, n2, weight=weight)
    increment_edge_version(graph)


class NodeProtocol(Protocol):
    """Minimal protocol for TNFR nodes."""

    EPI: EPIValue
    vf: StructuralFrequency
    theta: Phase
    Si: SenseIndex
    epi_kind: str
    dnfr: DeltaNFR
    d2EPI: SecondDerivativeEPI
    graph: MutableMapping[str, Any]

    def neighbors(self) -> Iterable[NodeProtocol | Hashable]:
        """Iterate structural neighbours coupled to this node."""

        ...

    def _glyph_storage(self) -> MutableMapping[str, object]:
        """Return the mutable mapping storing glyph metadata."""

        ...

    def has_edge(self, other: "NodeProtocol") -> bool:
        """Return ``True`` when an edge connects this node to ``other``."""

        ...

    def add_edge(
        self,
        other: NodeProtocol,
        weight: CouplingWeight,
        *,
        overwrite: bool = False,
    ) -> None:
        """Couple ``other`` using ``weight`` optionally replacing existing links."""

        ...

    def offset(self) -> int:
        """Return the node offset index within the canonical ordering."""

        ...

    def all_nodes(self) -> Iterable[NodeProtocol]:
        """Iterate all nodes of the attached graph as :class:`NodeProtocol` objects."""

        ...


class NodeNX(NodeProtocol):
    """Adapter for ``networkx`` nodes."""

    # Statically defined property descriptors for ``NodeNX`` attributes.
    # Declaring them here makes the attributes discoverable by type checkers
    # and IDEs, avoiding the previous runtime ``setattr`` loop.
    EPI: EPIValue = ATTR_SPECS["EPI"].build_property()
    vf: StructuralFrequency = ATTR_SPECS["vf"].build_property()
    theta: Phase = ATTR_SPECS["theta"].build_property()
    Si: SenseIndex = ATTR_SPECS["Si"].build_property()
    epi_kind: str = ATTR_SPECS["epi_kind"].build_property()
    dnfr: DeltaNFR = ATTR_SPECS["dnfr"].build_property()
    d2EPI: SecondDerivativeEPI = ATTR_SPECS["d2EPI"].build_property()

    @staticmethod
    def _prepare_coherence_operator(
        operator: CoherenceOperator | None,
        *,
        dim: int | None = None,
        spectrum: Sequence[float] | np.ndarray | None = None,
        c_min: float | None = None,
    ) -> CoherenceOperator | None:
        if operator is not None:
            return operator

        spectrum_array: np.ndarray | None
        if spectrum is None:
            spectrum_array = None
        else:
            spectrum_array = np.asarray(spectrum, dtype=np.complex128)
            if spectrum_array.ndim != 1:
                raise ValueError("Coherence spectrum must be one-dimensional.")

        effective_dim = dim
        if spectrum_array is not None:
            spectrum_length = spectrum_array.shape[0]
            if effective_dim is None:
                effective_dim = int(spectrum_length)
            elif spectrum_length != int(effective_dim):
                raise ValueError(
                    "Coherence spectrum size mismatch with requested dimension."
                )

        if effective_dim is None:
            return None

        kwargs: dict[str, Any] = {}
        if spectrum_array is not None:
            kwargs["spectrum"] = spectrum_array
        if c_min is not None:
            kwargs["c_min"] = float(c_min)
        return make_coherence_operator(int(effective_dim), **kwargs)

    @staticmethod
    def _prepare_frequency_operator(
        operator: FrequencyOperator | None,
        *,
        matrix: Sequence[Sequence[complex]] | np.ndarray | None = None,
    ) -> FrequencyOperator | None:
        if operator is not None:
            return operator
        if matrix is None:
            return None
        return make_frequency_operator(np.asarray(matrix, dtype=np.complex128))

    def __init__(
        self,
        G: TNFRGraph,
        n: NodeId,
        *,
        state_projector: StateProjector | None = None,
        enable_math_validation: Optional[bool] = None,
        hilbert_space: HilbertSpace | None = None,
        coherence_operator: CoherenceOperator | None = None,
        coherence_dim: int | None = None,
        coherence_spectrum: Sequence[float] | np.ndarray | None = None,
        coherence_c_min: float | None = None,
        frequency_operator: FrequencyOperator | None = None,
        frequency_matrix: Sequence[Sequence[complex]] | np.ndarray | None = None,
        coherence_threshold: float | None = None,
        validator: NFRValidator | None = None,
        rng: np.random.Generator | None = None,
    ) -> None:
        self.G: TNFRGraph = G
        self.n: NodeId = n
        self.graph: MutableMapping[str, Any] = G.graph
        self.state_projector: StateProjector = state_projector or BasicStateProjector()
        self._math_validation_override: Optional[bool] = enable_math_validation
        if enable_math_validation is None:
            effective_validation = get_flags().enable_math_validation
        else:
            effective_validation = bool(enable_math_validation)
        self.enable_math_validation: bool = effective_validation
        default_dimension = (
            G.number_of_nodes()
            if hasattr(G, "number_of_nodes")
            else len(tuple(G.nodes))
        )
        default_dimension = max(1, int(default_dimension))
        self.hilbert_space: HilbertSpace = hilbert_space or HilbertSpace(
            default_dimension
        )
        if coherence_operator is not None and (
            coherence_dim is not None
            or coherence_spectrum is not None
            or coherence_c_min is not None
        ):
            raise ValueError(
                "Provide either a coherence operator or factory parameters, not both."
            )
        if frequency_operator is not None and frequency_matrix is not None:
            raise ValueError(
                "Provide either a frequency operator or frequency matrix, not both."
            )

        self.coherence_operator: CoherenceOperator | None = (
            self._prepare_coherence_operator(
                coherence_operator,
                dim=coherence_dim,
                spectrum=coherence_spectrum,
                c_min=coherence_c_min,
            )
        )
        self.frequency_operator: FrequencyOperator | None = (
            self._prepare_frequency_operator(
                frequency_operator,
                matrix=frequency_matrix,
            )
        )
        self.coherence_threshold: float | None = (
            float(coherence_threshold) if coherence_threshold is not None else None
        )
        self.validator: NFRValidator | None = validator
        self.rng: np.random.Generator | None = rng
        # Only add to default cache if not being created by from_graph
        if not G.graph.get("_creating_node", False):
            G.graph.setdefault("_node_cache", {})[n] = self

    def _glyph_storage(self) -> MutableMapping[str, Any]:
        return self.G.nodes[self.n]

    @classmethod
    def from_graph(
        cls, G: TNFRGraph, n: NodeId, *, use_weak_cache: bool = False
    ) -> "NodeNX":
        """Return cached ``NodeNX`` for ``(G, n)`` with thread safety.

        Parameters
        ----------
        G : TNFRGraph
            The graph containing the node.
        n : NodeId
            The node identifier.
        use_weak_cache : bool, optional
            When True, use WeakValueDictionary for the node cache to allow
            automatic garbage collection of unused NodeNX instances. This is
            useful for ephemeral graphs where nodes are created temporarily
            and should be released when no longer referenced elsewhere.
            Default is False to maintain backward compatibility.

        Returns
        -------
        NodeNX
            The cached or newly created NodeNX instance for the specified node.

        Notes
        -----
        The weak cache mode trades off some cache retention for better memory
        behavior in scenarios with many short-lived graphs or when nodes are
        accessed infrequently. Use weak caching when:

        - Processing many ephemeral graphs sequentially
        - Working with large graphs where only subsets are actively used
        - Memory pressure is a concern and stale node objects should be released

        The default strong cache provides better performance for long-lived
        graphs with repeated node access patterns.
        """
        cache_key = "_node_cache_weak" if use_weak_cache else "_node_cache"

        # Fast path: lock-free read for cache hit (common case)
        cache = G.graph.get(cache_key)
        if cache is not None:
            node = cache.get(n)
            if node is not None:
                return node

        # Slow path: need to create node or initialize cache
        # Use per-node lock for finer granularity and reduced contention
        lock = get_lock(f"node_nx_{id(G)}_{n}_{cache_key}")
        with lock:
            # Double-check pattern: verify node still doesn't exist
            cache = G.graph.get(cache_key)
            if cache is not None:
                node = cache.get(n)
                if node is not None:
                    return node

            # Initialize cache if needed
            if cache is None:
                # Use a separate lock for cache initialization to avoid deadlocks
                graph_lock = get_lock(f"node_nx_cache_init_{id(G)}_{cache_key}")
                with graph_lock:
                    # Triple-check: another thread may have initialized
                    cache = G.graph.get(cache_key)
                    if cache is None:
                        if use_weak_cache:
                            cache = WeakValueDictionary()
                        else:
                            cache = {}
                        G.graph[cache_key] = cache

            # Check again after cache initialization
            node = cache.get(n)
            if node is not None:
                return node

            # Create node - use a sentinel to prevent __init__ from adding to cache
            G.graph["_creating_node"] = True
            try:
                node = cls(G, n)
            finally:
                G.graph.pop("_creating_node", None)

            # Add to requested cache only
            cache[n] = node

            return node

    def neighbors(self) -> Iterable[NodeId]:
        """Iterate neighbour identifiers (IDs).

        Wrap each resulting ID with :meth:`from_graph` to obtain the cached
        ``NodeNX`` instance when actual node objects are required.
        """
        return self.G.neighbors(self.n)

    def has_edge(self, other: NodeProtocol) -> bool:
        """Return ``True`` when an edge connects this node to ``other``."""

        if isinstance(other, NodeNX):
            return self.G.has_edge(self.n, other.n)
        raise NotImplementedError

    def add_edge(
        self,
        other: NodeProtocol,
        weight: CouplingWeight,
        *,
        overwrite: bool = False,
    ) -> None:
        """Couple ``other`` using ``weight`` optionally replacing existing links."""

        if isinstance(other, NodeNX):
            add_edge(
                self.G,
                self.n,
                other.n,
                weight,
                overwrite,
            )
        else:
            raise NotImplementedError

    def offset(self) -> int:
        """Return the cached node offset within the canonical ordering."""

        mapping = ensure_node_offset_map(self.G)
        return mapping.get(self.n, 0)

    def all_nodes(self) -> Iterable[NodeProtocol]:
        """Iterate all nodes of ``self.G`` as ``NodeNX`` adapters."""

        override = self.graph.get("_all_nodes")
        if override is not None:
            return override

        nodes = cached_node_list(self.G)
        return tuple(NodeNX.from_graph(self.G, v) for v in nodes)

    def run_sequence_with_validation(
        self,
        ops: Iterable[Callable[[TNFRGraph, NodeId], None]],
        *,
        projector: StateProjector | None = None,
        hilbert_space: HilbertSpace | None = None,
        coherence_operator: CoherenceOperator | None = None,
        coherence_dim: int | None = None,
        coherence_spectrum: Sequence[float] | np.ndarray | None = None,
        coherence_c_min: float | None = None,
        coherence_threshold: float | None = None,
        frequency_operator: FrequencyOperator | None = None,
        frequency_matrix: Sequence[Sequence[complex]] | np.ndarray | None = None,
        validator: NFRValidator | None = None,
        enforce_frequency_positivity: bool | None = None,
        enable_validation: bool | None = None,
        rng: np.random.Generator | None = None,
        log_metrics: bool = False,
    ) -> dict[str, Any]:
        """Run ``ops`` then return pre/post metrics with optional validation."""

        from .structural import run_sequence as structural_run_sequence

        projector = projector or self.state_projector
        hilbert = hilbert_space or self.hilbert_space

        effective_coherence = (
            self._prepare_coherence_operator(
                coherence_operator,
                dim=coherence_dim,
                spectrum=coherence_spectrum,
                c_min=(
                    coherence_c_min
                    if coherence_c_min is not None
                    else (
                        self.coherence_operator.c_min
                        if self.coherence_operator is not None
                        else None
                    )
                ),
            )
            if any(
                parameter is not None
                for parameter in (
                    coherence_operator,
                    coherence_dim,
                    coherence_spectrum,
                    coherence_c_min,
                )
            )
            else self.coherence_operator
        )
        effective_freq = (
            self._prepare_frequency_operator(
                frequency_operator,
                matrix=frequency_matrix,
            )
            if frequency_operator is not None or frequency_matrix is not None
            else self.frequency_operator
        )
        threshold = (
            float(coherence_threshold)
            if coherence_threshold is not None
            else self.coherence_threshold
        )
        validator = validator or self.validator
        rng = rng or self.rng

        if enable_validation is None:
            if self._math_validation_override is not None:
                should_validate = bool(self._math_validation_override)
            else:
                should_validate = bool(get_flags().enable_math_validation)
        else:
            should_validate = bool(enable_validation)
        self.enable_math_validation = should_validate

        enforce_frequency = (
            bool(enforce_frequency_positivity)
            if enforce_frequency_positivity is not None
            else bool(effective_freq is not None)
        )

        def _project(epi: float, vf: float, theta: float) -> np.ndarray:
            local_rng = None
            if rng is not None:
                bit_generator = rng.bit_generator
                cloned_state = copy.deepcopy(bit_generator.state)
                local_bit_generator = type(bit_generator)()
                local_bit_generator.state = cloned_state
                local_rng = np.random.Generator(local_bit_generator)
            vector = projector(
                epi=epi,
                nu_f=vf,
                theta=theta,
                dim=hilbert.dimension,
                rng=local_rng,
            )
            return np.asarray(vector, dtype=np.complex128)

        active_flags = get_flags()
        should_log_metrics = bool(log_metrics and active_flags.log_performance)

        def _metrics(state: np.ndarray, label: str) -> dict[str, Any]:
            metrics: dict[str, Any] = {}
            with context_flags(log_performance=False):
                norm_passed, norm_value = runtime_normalized(
                    state, hilbert, label=label
                )
                metrics["normalized"] = bool(norm_passed)
                metrics["norm"] = float(norm_value)
                if effective_coherence is not None and threshold is not None:
                    coh_passed, coh_value = runtime_coherence(
                        state, effective_coherence, threshold, label=label
                    )
                    metrics["coherence"] = bool(coh_passed)
                    metrics["coherence_expectation"] = float(coh_value)
                    metrics["coherence_threshold"] = float(threshold)
                if effective_freq is not None:
                    freq_summary = runtime_frequency_positive(
                        state,
                        effective_freq,
                        enforce=enforce_frequency,
                        label=label,
                    )
                    metrics["frequency_positive"] = bool(freq_summary["passed"])
                    metrics["frequency_expectation"] = float(freq_summary["value"])
                    metrics["frequency_projection_passed"] = bool(
                        freq_summary["projection_passed"]
                    )
                    metrics["frequency_spectrum_psd"] = bool(
                        freq_summary["spectrum_psd"]
                    )
                    metrics["frequency_spectrum_min"] = float(
                        freq_summary["spectrum_min"]
                    )
                    metrics["frequency_enforced"] = bool(freq_summary["enforce"])
                if effective_coherence is not None:
                    unitary_passed, unitary_norm = runtime_stable_unitary(
                        state,
                        effective_coherence,
                        hilbert,
                        label=label,
                    )
                    metrics["stable_unitary"] = bool(unitary_passed)
                    metrics["stable_unitary_norm_after"] = float(unitary_norm)
            if should_log_metrics:
                LOGGER.debug(
                    "node_metrics.%s normalized=%s coherence=%s frequency_positive=%s stable_unitary=%s coherence_expectation=%s frequency_expectation=%s",
                    label,
                    metrics.get("normalized"),
                    metrics.get("coherence"),
                    metrics.get("frequency_positive"),
                    metrics.get("stable_unitary"),
                    metrics.get("coherence_expectation"),
                    metrics.get("frequency_expectation"),
                )
            return metrics

        pre_state = _project(self.EPI, self.vf, self.theta)
        pre_metrics = _metrics(pre_state, "pre")

        structural_run_sequence(self.G, self.n, ops)

        post_state = _project(self.EPI, self.vf, self.theta)
        post_metrics = _metrics(post_state, "post")

        validation_summary: dict[str, Any] | None = None
        if should_validate:
            validator_instance = validator
            if validator_instance is None:
                if effective_coherence is None:
                    raise ValueError("Validation requires a coherence operator.")
                validator_instance = NFRValidator(
                    hilbert,
                    effective_coherence,
                    threshold if threshold is not None else 0.0,
                    frequency_operator=effective_freq,
                )
            outcome = validator_instance.validate(
                post_state,
                enforce_frequency_positivity=enforce_frequency,
            )
            validation_summary = {
                "passed": bool(outcome.passed),
                "summary": outcome.summary,
                "report": validator_instance.report(outcome),
            }

        result = {
            "pre_state": pre_state,
            "post_state": post_state,
            "pre_metrics": pre_metrics,
            "post_metrics": post_metrics,
            "validation": validation_summary,
        }
        # Preserve legacy structure for downstream compatibility.
        result["pre"] = {"state": pre_state, "metrics": pre_metrics}
        result["post"] = {"state": post_state, "metrics": post_metrics}
        return result
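For orientation, a minimal usage sketch of the NodeNX adapter and add_edge helper added above. This is not part of the diff: it assumes a plain networkx.Graph satisfies the TNFRGraph expectations, and the node identifiers, attribute values, and weights shown are illustrative only.

# Illustrative sketch only (assumption): exercises NodeNX.from_graph, the
# property descriptors, and the coupling helpers shown in tnfr/node.py above.
import networkx as nx

from tnfr.node import NodeNX, add_edge

G = nx.Graph()
G.add_nodes_from(["a", "b"])

# Wrap graph nodes with the cached adapter (per-graph, thread-safe cache).
a = NodeNX.from_graph(G, "a")
b = NodeNX.from_graph(G, "b", use_weak_cache=False)

# Property descriptors read/write the aliased node attributes.
a.vf = 1.0      # structural frequency (example value)
a.theta = 0.25  # phase (example value)
a.Si = 0.5      # sense index (example value)

# Couple the nodes; self-loops are ignored and weights must be finite and >= 0.
a.add_edge(b, 0.8)
assert a.has_edge(b)

# Module-level helper is equivalent and bumps the graph's edge version.
add_edge(G, "a", "b", 0.9, overwrite=True)

The run_sequence_with_validation workflow additionally needs a sequence of operator callables (executed through tnfr.structural.run_sequence) and, when validation is enabled, a coherence operator or the factory parameters to build one; those pieces are outside this sketch.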