tnfr-4.5.2-py3-none-any.whl → tnfr-7.0.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of tnfr might be problematic.

Files changed (195)
  1. tnfr/__init__.py +275 -51
  2. tnfr/__init__.pyi +33 -0
  3. tnfr/_compat.py +10 -0
  4. tnfr/_generated_version.py +34 -0
  5. tnfr/_version.py +49 -0
  6. tnfr/_version.pyi +7 -0
  7. tnfr/alias.py +117 -31
  8. tnfr/alias.pyi +108 -0
  9. tnfr/cache.py +6 -572
  10. tnfr/cache.pyi +16 -0
  11. tnfr/callback_utils.py +16 -38
  12. tnfr/callback_utils.pyi +79 -0
  13. tnfr/cli/__init__.py +34 -14
  14. tnfr/cli/__init__.pyi +26 -0
  15. tnfr/cli/arguments.py +211 -28
  16. tnfr/cli/arguments.pyi +27 -0
  17. tnfr/cli/execution.py +470 -50
  18. tnfr/cli/execution.pyi +70 -0
  19. tnfr/cli/utils.py +18 -3
  20. tnfr/cli/utils.pyi +8 -0
  21. tnfr/config/__init__.py +13 -0
  22. tnfr/config/__init__.pyi +10 -0
  23. tnfr/{constants_glyphs.py → config/constants.py} +26 -20
  24. tnfr/config/constants.pyi +12 -0
  25. tnfr/config/feature_flags.py +83 -0
  26. tnfr/{config.py → config/init.py} +11 -7
  27. tnfr/config/init.pyi +8 -0
  28. tnfr/config/operator_names.py +93 -0
  29. tnfr/config/operator_names.pyi +28 -0
  30. tnfr/config/presets.py +84 -0
  31. tnfr/config/presets.pyi +7 -0
  32. tnfr/constants/__init__.py +80 -29
  33. tnfr/constants/__init__.pyi +92 -0
  34. tnfr/constants/aliases.py +31 -0
  35. tnfr/constants/core.py +4 -4
  36. tnfr/constants/core.pyi +17 -0
  37. tnfr/constants/init.py +1 -1
  38. tnfr/constants/init.pyi +12 -0
  39. tnfr/constants/metric.py +7 -15
  40. tnfr/constants/metric.pyi +19 -0
  41. tnfr/dynamics/__init__.py +165 -633
  42. tnfr/dynamics/__init__.pyi +82 -0
  43. tnfr/dynamics/adaptation.py +267 -0
  44. tnfr/dynamics/aliases.py +23 -0
  45. tnfr/dynamics/coordination.py +385 -0
  46. tnfr/dynamics/dnfr.py +2283 -400
  47. tnfr/dynamics/dnfr.pyi +24 -0
  48. tnfr/dynamics/integrators.py +406 -98
  49. tnfr/dynamics/integrators.pyi +34 -0
  50. tnfr/dynamics/runtime.py +881 -0
  51. tnfr/dynamics/sampling.py +10 -5
  52. tnfr/dynamics/sampling.pyi +7 -0
  53. tnfr/dynamics/selectors.py +719 -0
  54. tnfr/execution.py +70 -48
  55. tnfr/execution.pyi +45 -0
  56. tnfr/flatten.py +13 -9
  57. tnfr/flatten.pyi +21 -0
  58. tnfr/gamma.py +66 -53
  59. tnfr/gamma.pyi +34 -0
  60. tnfr/glyph_history.py +110 -52
  61. tnfr/glyph_history.pyi +35 -0
  62. tnfr/glyph_runtime.py +16 -0
  63. tnfr/glyph_runtime.pyi +9 -0
  64. tnfr/immutable.py +69 -28
  65. tnfr/immutable.pyi +34 -0
  66. tnfr/initialization.py +16 -16
  67. tnfr/initialization.pyi +65 -0
  68. tnfr/io.py +6 -240
  69. tnfr/io.pyi +16 -0
  70. tnfr/locking.pyi +7 -0
  71. tnfr/mathematics/__init__.py +81 -0
  72. tnfr/mathematics/backend.py +426 -0
  73. tnfr/mathematics/dynamics.py +398 -0
  74. tnfr/mathematics/epi.py +254 -0
  75. tnfr/mathematics/generators.py +222 -0
  76. tnfr/mathematics/metrics.py +119 -0
  77. tnfr/mathematics/operators.py +233 -0
  78. tnfr/mathematics/operators_factory.py +71 -0
  79. tnfr/mathematics/projection.py +78 -0
  80. tnfr/mathematics/runtime.py +173 -0
  81. tnfr/mathematics/spaces.py +247 -0
  82. tnfr/mathematics/transforms.py +292 -0
  83. tnfr/metrics/__init__.py +10 -10
  84. tnfr/metrics/__init__.pyi +20 -0
  85. tnfr/metrics/coherence.py +993 -324
  86. tnfr/metrics/common.py +23 -16
  87. tnfr/metrics/common.pyi +46 -0
  88. tnfr/metrics/core.py +251 -35
  89. tnfr/metrics/core.pyi +13 -0
  90. tnfr/metrics/diagnosis.py +708 -111
  91. tnfr/metrics/diagnosis.pyi +85 -0
  92. tnfr/metrics/export.py +27 -15
  93. tnfr/metrics/glyph_timing.py +232 -42
  94. tnfr/metrics/reporting.py +33 -22
  95. tnfr/metrics/reporting.pyi +12 -0
  96. tnfr/metrics/sense_index.py +987 -43
  97. tnfr/metrics/sense_index.pyi +9 -0
  98. tnfr/metrics/trig.py +214 -23
  99. tnfr/metrics/trig.pyi +13 -0
  100. tnfr/metrics/trig_cache.py +115 -22
  101. tnfr/metrics/trig_cache.pyi +10 -0
  102. tnfr/node.py +542 -136
  103. tnfr/node.pyi +178 -0
  104. tnfr/observers.py +152 -35
  105. tnfr/observers.pyi +31 -0
  106. tnfr/ontosim.py +23 -19
  107. tnfr/ontosim.pyi +28 -0
  108. tnfr/operators/__init__.py +601 -82
  109. tnfr/operators/__init__.pyi +45 -0
  110. tnfr/operators/definitions.py +513 -0
  111. tnfr/operators/definitions.pyi +78 -0
  112. tnfr/operators/grammar.py +760 -0
  113. tnfr/operators/jitter.py +107 -38
  114. tnfr/operators/jitter.pyi +11 -0
  115. tnfr/operators/registry.py +75 -0
  116. tnfr/operators/registry.pyi +13 -0
  117. tnfr/operators/remesh.py +149 -88
  118. tnfr/py.typed +0 -0
  119. tnfr/rng.py +46 -143
  120. tnfr/rng.pyi +14 -0
  121. tnfr/schemas/__init__.py +8 -0
  122. tnfr/schemas/grammar.json +94 -0
  123. tnfr/selector.py +25 -19
  124. tnfr/selector.pyi +19 -0
  125. tnfr/sense.py +72 -62
  126. tnfr/sense.pyi +23 -0
  127. tnfr/structural.py +522 -262
  128. tnfr/structural.pyi +69 -0
  129. tnfr/telemetry/__init__.py +35 -0
  130. tnfr/telemetry/cache_metrics.py +226 -0
  131. tnfr/telemetry/nu_f.py +423 -0
  132. tnfr/telemetry/nu_f.pyi +123 -0
  133. tnfr/telemetry/verbosity.py +37 -0
  134. tnfr/tokens.py +1 -3
  135. tnfr/tokens.pyi +36 -0
  136. tnfr/trace.py +270 -113
  137. tnfr/trace.pyi +40 -0
  138. tnfr/types.py +574 -6
  139. tnfr/types.pyi +331 -0
  140. tnfr/units.py +69 -0
  141. tnfr/units.pyi +16 -0
  142. tnfr/utils/__init__.py +217 -0
  143. tnfr/utils/__init__.pyi +202 -0
  144. tnfr/utils/cache.py +2395 -0
  145. tnfr/utils/cache.pyi +468 -0
  146. tnfr/utils/chunks.py +104 -0
  147. tnfr/utils/chunks.pyi +21 -0
  148. tnfr/{collections_utils.py → utils/data.py} +147 -90
  149. tnfr/utils/data.pyi +64 -0
  150. tnfr/utils/graph.py +85 -0
  151. tnfr/utils/graph.pyi +10 -0
  152. tnfr/utils/init.py +770 -0
  153. tnfr/utils/init.pyi +78 -0
  154. tnfr/utils/io.py +456 -0
  155. tnfr/{helpers → utils}/numeric.py +51 -24
  156. tnfr/utils/numeric.pyi +21 -0
  157. tnfr/validation/__init__.py +113 -0
  158. tnfr/validation/__init__.pyi +77 -0
  159. tnfr/validation/compatibility.py +95 -0
  160. tnfr/validation/compatibility.pyi +6 -0
  161. tnfr/validation/grammar.py +71 -0
  162. tnfr/validation/grammar.pyi +40 -0
  163. tnfr/validation/graph.py +138 -0
  164. tnfr/validation/graph.pyi +17 -0
  165. tnfr/validation/rules.py +281 -0
  166. tnfr/validation/rules.pyi +55 -0
  167. tnfr/validation/runtime.py +263 -0
  168. tnfr/validation/runtime.pyi +31 -0
  169. tnfr/validation/soft_filters.py +170 -0
  170. tnfr/validation/soft_filters.pyi +37 -0
  171. tnfr/validation/spectral.py +159 -0
  172. tnfr/validation/spectral.pyi +46 -0
  173. tnfr/validation/syntax.py +40 -0
  174. tnfr/validation/syntax.pyi +10 -0
  175. tnfr/validation/window.py +39 -0
  176. tnfr/validation/window.pyi +1 -0
  177. tnfr/viz/__init__.py +9 -0
  178. tnfr/viz/matplotlib.py +246 -0
  179. tnfr-7.0.0.dist-info/METADATA +179 -0
  180. tnfr-7.0.0.dist-info/RECORD +185 -0
  181. {tnfr-4.5.2.dist-info → tnfr-7.0.0.dist-info}/licenses/LICENSE.md +1 -1
  182. tnfr/grammar.py +0 -344
  183. tnfr/graph_utils.py +0 -84
  184. tnfr/helpers/__init__.py +0 -71
  185. tnfr/import_utils.py +0 -228
  186. tnfr/json_utils.py +0 -162
  187. tnfr/logging_utils.py +0 -116
  188. tnfr/presets.py +0 -60
  189. tnfr/validators.py +0 -84
  190. tnfr/value_utils.py +0 -59
  191. tnfr-4.5.2.dist-info/METADATA +0 -379
  192. tnfr-4.5.2.dist-info/RECORD +0 -67
  193. {tnfr-4.5.2.dist-info → tnfr-7.0.0.dist-info}/WHEEL +0 -0
  194. {tnfr-4.5.2.dist-info → tnfr-7.0.0.dist-info}/entry_points.txt +0 -0
  195. {tnfr-4.5.2.dist-info → tnfr-7.0.0.dist-info}/top_level.txt +0 -0
tnfr/validation/soft_filters.py ADDED
@@ -0,0 +1,170 @@
+ """Soft grammar filters harmonising canonical selector heuristics."""
+
+ from __future__ import annotations
+
+ from typing import TYPE_CHECKING, Any, Callable
+
+ from ..alias import get_attr
+ from ..constants.aliases import ALIAS_D2EPI
+ from ..glyph_history import recent_glyph
+ from ..types import Glyph
+ from ..utils import clamp01
+ from .rules import glyph_fallback, get_norm, normalized_dnfr
+
+ if TYPE_CHECKING:  # pragma: no cover - import cycle guard
+     from collections.abc import Mapping
+     from ..operators.grammar import GrammarContext
+
+ __all__ = (
+     "acceleration_norm",
+     "check_repeats",
+     "maybe_force",
+     "soft_grammar_filters",
+ )
+
+
+ def acceleration_norm(ctx: "GrammarContext", nd: "Mapping[str, Any]") -> float:
+     """Return the node acceleration normalised to ``[0, 1]``.
+
+     The computation uses the canonical ``accel_max`` bound stored in
+     :class:`~tnfr.operators.grammar.GrammarContext`. Values beyond the bound are
+     clamped to preserve structural comparability with ΔNFR-based heuristics.
+     """
+
+     max_val = get_norm(ctx, "accel_max")
+     return clamp01(abs(get_attr(nd, ALIAS_D2EPI, 0.0)) / max_val)
+ def check_repeats(
+     ctx: "GrammarContext", n: Any, cand: Glyph | str
+ ) -> Glyph | str:
+     """Swap ``cand`` when it breaches the configured repetition window.
+
+     The rule honours the soft grammar configuration stored in ``ctx.cfg_soft``:
+
+     * ``window`` specifies the history length inspected using
+       :func:`tnfr.glyph_history.recent_glyph`.
+     * ``avoid_repeats`` enumerates glyph codes to dodge within that window.
+     * ``fallbacks`` optionally map avoided glyph codes to explicit
+       replacements, defaulting to canonical fallbacks when unspecified.
+     """
+
+     nd = ctx.G.nodes[n]
+     cfg = ctx.cfg_soft
+     gwin = int(cfg.get("window", 0))
+     avoid = set(cfg.get("avoid_repeats", []))
+     fallbacks = cfg.get("fallbacks", {})
+     cand_key = cand.value if isinstance(cand, Glyph) else str(cand)
+     if gwin > 0 and cand_key in avoid and recent_glyph(nd, cand_key, gwin):
+         fallback = glyph_fallback(cand_key, fallbacks)
+         fallback_key = fallback.value if isinstance(fallback, Glyph) else str(fallback)
+         if fallback_key != cand_key:
+             return fallback
+         history: list[str] = []
+         for item in nd.get("glyph_history", ()):
+             if isinstance(item, Glyph):
+                 history.append(item.value)
+             else:
+                 try:
+                     history.append(Glyph(str(item)).value)
+                 except (TypeError, ValueError):
+                     history.append(str(item))
+         order = (*history[-gwin:], cand_key)
+         from ..operators import grammar as _grammar
+
+         def to_structural(value: Glyph | str) -> str:
+             default = value.value if isinstance(value, Glyph) else str(value)
+             result = _grammar.glyph_function_name(value, default=default)
+             return default if result is None else result
+
+         cand_name = to_structural(cand_key)
+         fallback_name = to_structural(fallback_key)
+         order_names = tuple(to_structural(item) for item in order)
+
+         raise _grammar.RepeatWindowError(
+             rule="repeat-window",
+             candidate=cand_name,
+             message=f"{cand_name} repeats within window {gwin}",
+             window=gwin,
+             order=order_names,
+             context={"fallback": fallback_name},
+         )
+     return cand
+
+
+ def maybe_force(
+     ctx: "GrammarContext",
+     n: Any,
+     cand: Glyph | str,
+     original: Glyph | str,
+     accessor: Callable[["GrammarContext", "Mapping[str, Any]"], float],
+     key: str,
+ ) -> Glyph | str:
+     """Return ``original`` when ``accessor`` crosses the soft threshold ``key``.
+
+     ``accessor`` receives the grammar context and node attributes. Whenever the
+     resulting score is greater than or equal to the configured threshold the
+     original candidate is restored, ensuring soft filters never override
+     high-confidence canonical choices.
+     """
+
+     if cand == original:
+         return cand
+     force_th = float(ctx.cfg_soft.get(key, 0.60))
+     if accessor(ctx, ctx.G.nodes[n]) >= force_th:
+         return original
+     return cand
+
+
+ def _match_template(result: Glyph | str, template: Glyph | str) -> Glyph | str:
+     """Coerce ``result`` to mirror ``template``'s representation when possible."""
+
+     if isinstance(template, str) and isinstance(result, Glyph):
+         return result.value
+     if isinstance(template, Glyph) and isinstance(result, str):
+         try:
+             return Glyph(result)
+         except (TypeError, ValueError):
+             return result
+     return result
+
+
+ def soft_grammar_filters(
+     ctx: "GrammarContext",
+     n: Any,
+     cand: Glyph | str,
+     *,
+     original: Glyph | str | None = None,
+     template: Glyph | str | None = None,
+ ) -> Glyph | str:
+     """Apply the canonical soft grammar pipeline for ``cand``.
+
+     The pipeline performs three ordered checks:
+
+     1. :func:`check_repeats` swaps recent repetitions for the configured
+        fallback glyphs.
+     2. :func:`maybe_force` with :func:`tnfr.validation.rules.normalized_dnfr`
+        restores the original glyph when ΔNFR already exceeds ``force_dnfr``.
+     3. :func:`maybe_force` with :func:`acceleration_norm` performs the same
+        safeguard using the ``force_accel`` threshold.
+
+     Parameters
+     ----------
+     ctx:
+         Active grammar context providing node access and configuration.
+     n:
+         Node identifier inside ``ctx.G``.
+     cand:
+         Candidate glyph (``Glyph`` or code string) to validate softly.
+     original:
+         Optional glyph used as the reference for :func:`maybe_force`. When
+         omitted, ``cand`` before filtering is used as the anchor value.
+     template:
+         Optional value whose type will be preserved in the returned result. By
+         default the original ``cand`` representation is used.
+     """
+
+     anchor = cand if original is None else original
+     filtered = check_repeats(ctx, n, cand)
+     filtered = maybe_force(ctx, n, filtered, anchor, normalized_dnfr, "force_dnfr")
+     filtered = maybe_force(ctx, n, filtered, anchor, acceleration_norm, "force_accel")
+     base_template = cand if template is None else template
+     return _match_template(filtered, base_template)
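
Taken together, these filters read only a handful of keys from ctx.cfg_soft. The sketch below lists those keys with illustrative values; it is not taken from the package, and wiring the mapping into a GrammarContext is outside this diff.

    # Illustrative soft-grammar configuration covering the keys read above.
    # Values are examples only; glyph codes are left empty rather than invented.
    cfg_soft = {
        "window": 3,           # history length passed to recent_glyph by check_repeats
        "avoid_repeats": [],   # glyph codes to dodge inside that window
        "fallbacks": {},       # optional explicit replacements per avoided code
        "force_dnfr": 0.60,    # threshold used by maybe_force via normalized_dnfr
        "force_accel": 0.60,   # threshold used by maybe_force via acceleration_norm
    }
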
tnfr/validation/soft_filters.pyi ADDED
@@ -0,0 +1,37 @@
+ from typing import Any, Callable, Mapping
+
+ from ..types import Glyph
+ from .grammar import GrammarContext
+
+ __all__ = (
+     "acceleration_norm",
+     "check_repeats",
+     "maybe_force",
+     "soft_grammar_filters",
+ )
+
+
+ def acceleration_norm(ctx: GrammarContext, nd: Mapping[str, Any]) -> float: ...
+
+
+ def check_repeats(ctx: GrammarContext, n: Any, cand: Glyph | str) -> Glyph | str: ...
+
+
+ def maybe_force(
+     ctx: GrammarContext,
+     n: Any,
+     cand: Glyph | str,
+     original: Glyph | str,
+     accessor: Callable[[GrammarContext, Mapping[str, Any]], float],
+     key: str,
+ ) -> Glyph | str: ...
+
+
+ def soft_grammar_filters(
+     ctx: GrammarContext,
+     n: Any,
+     cand: Glyph | str,
+     *,
+     original: Glyph | str | None = ...,
+     template: Glyph | str | None = ...,
+ ) -> Glyph | str: ...
tnfr/validation/spectral.py ADDED
@@ -0,0 +1,159 @@
+ """Spectral validation helpers aligned with the TNFR canonical interface."""
+
+ from __future__ import annotations
+
+ from dataclasses import dataclass
+ from typing import Any, Mapping, Sequence
+
+ import numpy as np
+
+ from ..mathematics.operators import CoherenceOperator, FrequencyOperator
+ from ..mathematics.spaces import HilbertSpace
+ from ..mathematics.runtime import (
+     coherence as runtime_coherence,
+     frequency_positive as runtime_frequency_positive,
+     normalized as runtime_normalized,
+     stable_unitary as runtime_stable_unitary,
+ )
+ from . import ValidationOutcome, Validator
+
+ __all__ = ("NFRValidator",)
+
+
+ @dataclass(slots=True)
+ class NFRValidator(Validator[np.ndarray]):
+     """Validate spectral states against TNFR canonical invariants."""
+
+     hilbert_space: HilbertSpace
+     coherence_operator: CoherenceOperator
+     coherence_threshold: float
+     frequency_operator: FrequencyOperator | None = None
+     atol: float = 1e-9
+
+     def _compute_summary(
+         self,
+         state: Sequence[complex] | np.ndarray,
+         *,
+         enforce_frequency_positivity: bool | None = None,
+     ) -> tuple[bool, dict[str, Any], np.ndarray]:
+         vector = self.hilbert_space.project(state)
+
+         normalized_passed, norm_value = runtime_normalized(
+             vector, self.hilbert_space, atol=self.atol
+         )
+         if np.isclose(norm_value, 0.0, atol=self.atol):
+             raise ValueError("Cannot normalise a null state vector.")
+         normalised_vector = vector / norm_value
+
+         coherence_passed, coherence_value = runtime_coherence(
+             normalised_vector,
+             self.coherence_operator,
+             self.coherence_threshold,
+             normalise=False,
+             atol=self.atol,
+         )
+
+         frequency_summary: dict[str, Any] | None = None
+         freq_ok = True
+         if self.frequency_operator is not None:
+             if enforce_frequency_positivity is None:
+                 enforce_frequency_positivity = True
+
+             runtime_summary = runtime_frequency_positive(
+                 normalised_vector,
+                 self.frequency_operator,
+                 normalise=False,
+                 enforce=enforce_frequency_positivity,
+                 atol=self.atol,
+             )
+             freq_ok = bool(runtime_summary["passed"])
+             frequency_summary = {
+                 **runtime_summary,
+                 "enforced": runtime_summary["enforce"],
+             }
+             frequency_summary.pop("enforce", None)
+         elif enforce_frequency_positivity:
+             raise ValueError("Frequency positivity enforcement requested without operator.")
+
+         unitary_passed, unitary_norm = runtime_stable_unitary(
+             normalised_vector,
+             self.coherence_operator,
+             self.hilbert_space,
+             normalise=False,
+             atol=self.atol,
+         )
+
+         summary: dict[str, Any] = {
+             "normalized": bool(normalized_passed),
+             "coherence": {
+                 "passed": bool(coherence_passed),
+                 "value": coherence_value,
+                 "threshold": self.coherence_threshold,
+             },
+             "frequency": frequency_summary,
+             "unitary_stability": {
+                 "passed": bool(unitary_passed),
+                 "norm_after": unitary_norm,
+             },
+         }
+
+         overall = bool(normalized_passed and coherence_passed and freq_ok and unitary_passed)
+         return overall, summary, normalised_vector
+
+     def validate(
+         self,
+         subject: Sequence[complex] | np.ndarray,
+         /,
+         *,
+         enforce_frequency_positivity: bool | None = None,
+     ) -> ValidationOutcome[np.ndarray]:
+         """Return :class:`ValidationOutcome` for ``subject``."""
+
+         overall, summary, normalised_vector = self._compute_summary(
+             subject, enforce_frequency_positivity=enforce_frequency_positivity
+         )
+         artifacts = {"normalised_state": normalised_vector}
+         return ValidationOutcome(
+             subject=normalised_vector,
+             passed=overall,
+             summary=summary,
+             artifacts=artifacts,
+         )
+
+     def validate_state(
+         self,
+         state: Sequence[complex] | np.ndarray,
+         *,
+         enforce_frequency_positivity: bool | None = None,
+     ) -> tuple[bool, dict[str, Any]]:
+         """Backward compatible validation returning ``(passed, summary)``."""
+
+         overall, summary, _ = self._compute_summary(
+             state, enforce_frequency_positivity=enforce_frequency_positivity
+         )
+         return overall, summary
+
+     def report(self, outcome: ValidationOutcome[np.ndarray]) -> str:
+         """Return a human-readable report naming failed conditions."""
+
+         summary = outcome.summary
+         failed_checks: list[str] = []
+         if not summary.get("normalized", False):
+             failed_checks.append("normalization")
+
+         coherence_summary = summary.get("coherence", {})
+         if not coherence_summary.get("passed", False):
+             failed_checks.append("coherence threshold")
+
+         frequency_summary = summary.get("frequency")
+         if isinstance(frequency_summary, Mapping) and not frequency_summary.get("passed", False):
+             failed_checks.append("frequency positivity")
+
+         unitary_summary = summary.get("unitary_stability", {})
+         if not unitary_summary.get("passed", False):
+             failed_checks.append("unitary stability")
+
+         if not failed_checks:
+             return "All validation checks passed."
+         return "Failed checks: " + ", ".join(failed_checks) + "."
+
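
A minimal usage sketch for the new validator follows. It is not taken from the package: the HilbertSpace and CoherenceOperator constructor arguments are placeholders, since their real signatures live in tnfr/mathematics and are not shown in this diff.

    import numpy as np

    from tnfr.mathematics.operators import CoherenceOperator
    from tnfr.mathematics.spaces import HilbertSpace
    from tnfr.validation.spectral import NFRValidator

    # Placeholder construction: the actual arguments are defined in
    # tnfr/mathematics/spaces.py and tnfr/mathematics/operators.py (not shown here).
    space = HilbertSpace(...)
    c_op = CoherenceOperator(...)

    validator = NFRValidator(
        hilbert_space=space,
        coherence_operator=c_op,
        coherence_threshold=0.75,  # illustrative threshold
    )

    state = np.array([1.0 + 0.0j, 0.0 + 0.0j])
    outcome = validator.validate(state)                 # ValidationOutcome[np.ndarray]
    passed, summary = validator.validate_state(state)   # backward compatible tuple
    print(validator.report(outcome))
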
tnfr/validation/spectral.pyi ADDED
@@ -0,0 +1,46 @@
+ from typing import Any, Mapping, Sequence
+
+ import numpy as np
+
+ from ..mathematics.operators import CoherenceOperator, FrequencyOperator
+ from ..mathematics.spaces import HilbertSpace
+ from . import ValidationOutcome, Validator
+
+
+ class NFRValidator(Validator[np.ndarray]):
+     hilbert_space: HilbertSpace
+     coherence_operator: CoherenceOperator
+     coherence_threshold: float
+     frequency_operator: FrequencyOperator | None
+     atol: float
+
+     def __init__(
+         self,
+         hilbert_space: HilbertSpace,
+         coherence_operator: CoherenceOperator,
+         *,
+         coherence_threshold: float,
+         frequency_operator: FrequencyOperator | None = ...,
+         atol: float = ...,
+     ) -> None: ...
+
+     def validate(
+         self,
+         subject: Sequence[complex] | np.ndarray,
+         /,
+         *,
+         enforce_frequency_positivity: bool | None = ...,
+     ) -> ValidationOutcome[np.ndarray]: ...  # type: ignore[override]
+
+     def validate_state(
+         self,
+         state: Sequence[complex] | np.ndarray,
+         *,
+         enforce_frequency_positivity: bool | None = ...,
+     ) -> tuple[bool, Mapping[str, Any]]: ...
+
+     def report(self, outcome: ValidationOutcome[np.ndarray]) -> str: ...
+
+
+ __all__ = ("NFRValidator",)
+
tnfr/validation/syntax.py ADDED
@@ -0,0 +1,40 @@
+ """Deprecated entry point for sequence validation.
+
+ Projects should import :func:`tnfr.operators.grammar.validate_sequence` directly;
+ this shim remains only for backwards compatibility and will be removed in a
+ future release.
+ """
+
+ from __future__ import annotations
+
+ from collections.abc import Iterable
+ from typing import Any
+ import warnings
+
+ from . import ValidationOutcome
+ from ..operators import grammar as _canonical
+ from ..operators.grammar import validate_sequence as _validate_sequence
+
+ __all__ = ("validate_sequence",)
+
+ warnings.warn(
+     "'tnfr.validation.syntax' is deprecated; use 'tnfr.operators.grammar' "
+     "for sequence validation.",
+     DeprecationWarning,
+     stacklevel=2,
+ )
+
+
+ def validate_sequence(
+     names: Iterable[str] | object = _canonical._MISSING, **kwargs: Any
+ ) -> ValidationOutcome[tuple[str, ...]]:
+     """Proxy to :func:`tnfr.operators.grammar.validate_sequence`."""
+
+     warnings.warn(
+         "'tnfr.validation.syntax.validate_sequence' is deprecated; "
+         "use 'tnfr.operators.grammar.validate_sequence' instead.",
+         DeprecationWarning,
+         stacklevel=2,
+     )
+     return _validate_sequence(names, **kwargs)
+
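
Because the shim warns both at import time and on each call, downstream code can migrate (or temporarily silence the warning) along these lines; the sketch below is illustrative, not part of the package.

    # Preferred: import directly from the canonical module.
    from tnfr.operators.grammar import validate_sequence

    # Legacy path: still works, but emits DeprecationWarning at import and call time.
    import warnings

    with warnings.catch_warnings():
        warnings.simplefilter("ignore", DeprecationWarning)
        from tnfr.validation.syntax import validate_sequence as legacy_validate_sequence
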
tnfr/validation/syntax.pyi ADDED
@@ -0,0 +1,10 @@
+ from collections.abc import Iterable
+
+ from . import ValidationOutcome
+
+ __all__ = ("validate_sequence",)
+
+
+ def validate_sequence(
+     names: Iterable[str] | object = ..., **kwargs: object
+ ) -> ValidationOutcome[tuple[str, ...]]: ...
tnfr/validation/window.py ADDED
@@ -0,0 +1,39 @@
+ """Primitive window validation helpers without heavy dependencies."""
+
+ from __future__ import annotations
+
+ import numbers
+
+ __all__ = ["validate_window"]
+
+
+ def validate_window(window: int, *, positive: bool = False) -> int:
+     """Validate ``window`` as an integer and return it.
+
+     Parameters
+     ----------
+     window:
+         Value to coerce into an integer window size.
+     positive:
+         When ``True`` the window must be strictly greater than zero; otherwise
+         zero is accepted. Negative values are never permitted.
+
+     Returns
+     -------
+     int
+         The validated integer window size.
+
+     Raises
+     ------
+     TypeError
+         If ``window`` is not an integer value.
+     ValueError
+         If ``window`` violates the positivity constraint.
+     """
+
+     if isinstance(window, bool) or not isinstance(window, numbers.Integral):
+         raise TypeError("'window' must be an integer")
+     if window < 0 or (positive and window == 0):
+         kind = "positive" if positive else "non-negative"
+         raise ValueError(f"'window'={window} must be {kind}")
+     return int(window)
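
The contract documented above is easy to exercise directly; a brief sketch (not part of the package):

    from tnfr.validation.window import validate_window

    validate_window(5)                   # -> 5
    validate_window(0)                   # -> 0 (zero is allowed when positive=False)

    try:
        validate_window(0, positive=True)
    except ValueError:
        pass  # strictly positive windows are required when positive=True

    try:
        validate_window(True)
    except TypeError:
        pass  # bools are rejected even though bool subclasses int
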
tnfr/validation/window.pyi ADDED
@@ -0,0 +1 @@
+ def validate_window(window: int, *, positive: bool = ...) -> int: ...
tnfr/viz/__init__.py ADDED
@@ -0,0 +1,9 @@
+ """Visualization helpers for TNFR telemetry."""
+
+ from .matplotlib import plot_coherence_matrix, plot_phase_sync, plot_spectrum_path
+
+ __all__ = [
+     "plot_coherence_matrix",
+     "plot_phase_sync",
+     "plot_spectrum_path",
+ ]