tnfr 6.0.0__py3-none-any.whl → 7.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of tnfr might be problematic. Click here for more details.

Files changed (176):
  1. tnfr/__init__.py +50 -5
  2. tnfr/__init__.pyi +0 -7
  3. tnfr/_compat.py +0 -1
  4. tnfr/_generated_version.py +34 -0
  5. tnfr/_version.py +44 -2
  6. tnfr/alias.py +14 -13
  7. tnfr/alias.pyi +5 -37
  8. tnfr/cache.py +9 -729
  9. tnfr/cache.pyi +8 -224
  10. tnfr/callback_utils.py +16 -31
  11. tnfr/callback_utils.pyi +3 -29
  12. tnfr/cli/__init__.py +17 -11
  13. tnfr/cli/__init__.pyi +0 -21
  14. tnfr/cli/arguments.py +175 -14
  15. tnfr/cli/arguments.pyi +5 -11
  16. tnfr/cli/execution.py +434 -48
  17. tnfr/cli/execution.pyi +14 -24
  18. tnfr/cli/utils.py +20 -3
  19. tnfr/cli/utils.pyi +5 -5
  20. tnfr/config/__init__.py +2 -1
  21. tnfr/config/__init__.pyi +2 -0
  22. tnfr/config/feature_flags.py +83 -0
  23. tnfr/config/init.py +1 -1
  24. tnfr/config/operator_names.py +1 -14
  25. tnfr/config/presets.py +6 -26
  26. tnfr/constants/__init__.py +10 -13
  27. tnfr/constants/__init__.pyi +10 -22
  28. tnfr/constants/aliases.py +31 -0
  29. tnfr/constants/core.py +4 -3
  30. tnfr/constants/init.py +1 -1
  31. tnfr/constants/metric.py +3 -3
  32. tnfr/dynamics/__init__.py +64 -10
  33. tnfr/dynamics/__init__.pyi +3 -4
  34. tnfr/dynamics/adaptation.py +79 -13
  35. tnfr/dynamics/aliases.py +10 -9
  36. tnfr/dynamics/coordination.py +77 -35
  37. tnfr/dynamics/dnfr.py +575 -274
  38. tnfr/dynamics/dnfr.pyi +1 -10
  39. tnfr/dynamics/integrators.py +47 -33
  40. tnfr/dynamics/integrators.pyi +0 -1
  41. tnfr/dynamics/runtime.py +489 -129
  42. tnfr/dynamics/sampling.py +2 -0
  43. tnfr/dynamics/selectors.py +101 -62
  44. tnfr/execution.py +15 -8
  45. tnfr/execution.pyi +5 -25
  46. tnfr/flatten.py +7 -3
  47. tnfr/flatten.pyi +1 -8
  48. tnfr/gamma.py +22 -26
  49. tnfr/gamma.pyi +0 -6
  50. tnfr/glyph_history.py +37 -26
  51. tnfr/glyph_history.pyi +1 -19
  52. tnfr/glyph_runtime.py +16 -0
  53. tnfr/glyph_runtime.pyi +9 -0
  54. tnfr/immutable.py +20 -15
  55. tnfr/immutable.pyi +4 -7
  56. tnfr/initialization.py +5 -7
  57. tnfr/initialization.pyi +1 -9
  58. tnfr/io.py +6 -305
  59. tnfr/io.pyi +13 -8
  60. tnfr/mathematics/__init__.py +81 -0
  61. tnfr/mathematics/backend.py +426 -0
  62. tnfr/mathematics/dynamics.py +398 -0
  63. tnfr/mathematics/epi.py +254 -0
  64. tnfr/mathematics/generators.py +222 -0
  65. tnfr/mathematics/metrics.py +119 -0
  66. tnfr/mathematics/operators.py +233 -0
  67. tnfr/mathematics/operators_factory.py +71 -0
  68. tnfr/mathematics/projection.py +78 -0
  69. tnfr/mathematics/runtime.py +173 -0
  70. tnfr/mathematics/spaces.py +247 -0
  71. tnfr/mathematics/transforms.py +292 -0
  72. tnfr/metrics/__init__.py +10 -10
  73. tnfr/metrics/coherence.py +123 -94
  74. tnfr/metrics/common.py +22 -13
  75. tnfr/metrics/common.pyi +42 -11
  76. tnfr/metrics/core.py +72 -14
  77. tnfr/metrics/diagnosis.py +48 -57
  78. tnfr/metrics/diagnosis.pyi +3 -7
  79. tnfr/metrics/export.py +3 -5
  80. tnfr/metrics/glyph_timing.py +41 -31
  81. tnfr/metrics/reporting.py +13 -6
  82. tnfr/metrics/sense_index.py +884 -114
  83. tnfr/metrics/trig.py +167 -11
  84. tnfr/metrics/trig.pyi +1 -0
  85. tnfr/metrics/trig_cache.py +112 -15
  86. tnfr/node.py +400 -17
  87. tnfr/node.pyi +55 -38
  88. tnfr/observers.py +111 -8
  89. tnfr/observers.pyi +0 -15
  90. tnfr/ontosim.py +9 -6
  91. tnfr/ontosim.pyi +0 -5
  92. tnfr/operators/__init__.py +529 -42
  93. tnfr/operators/__init__.pyi +14 -0
  94. tnfr/operators/definitions.py +350 -18
  95. tnfr/operators/definitions.pyi +0 -14
  96. tnfr/operators/grammar.py +760 -0
  97. tnfr/operators/jitter.py +28 -22
  98. tnfr/operators/registry.py +7 -12
  99. tnfr/operators/registry.pyi +0 -2
  100. tnfr/operators/remesh.py +38 -61
  101. tnfr/rng.py +17 -300
  102. tnfr/schemas/__init__.py +8 -0
  103. tnfr/schemas/grammar.json +94 -0
  104. tnfr/selector.py +3 -4
  105. tnfr/selector.pyi +1 -1
  106. tnfr/sense.py +22 -24
  107. tnfr/sense.pyi +0 -7
  108. tnfr/structural.py +504 -21
  109. tnfr/structural.pyi +41 -18
  110. tnfr/telemetry/__init__.py +23 -1
  111. tnfr/telemetry/cache_metrics.py +226 -0
  112. tnfr/telemetry/nu_f.py +423 -0
  113. tnfr/telemetry/nu_f.pyi +123 -0
  114. tnfr/tokens.py +1 -4
  115. tnfr/tokens.pyi +1 -6
  116. tnfr/trace.py +20 -53
  117. tnfr/trace.pyi +9 -37
  118. tnfr/types.py +244 -15
  119. tnfr/types.pyi +200 -14
  120. tnfr/units.py +69 -0
  121. tnfr/units.pyi +16 -0
  122. tnfr/utils/__init__.py +107 -48
  123. tnfr/utils/__init__.pyi +80 -11
  124. tnfr/utils/cache.py +1705 -65
  125. tnfr/utils/cache.pyi +370 -58
  126. tnfr/utils/chunks.py +104 -0
  127. tnfr/utils/chunks.pyi +21 -0
  128. tnfr/utils/data.py +95 -5
  129. tnfr/utils/data.pyi +8 -17
  130. tnfr/utils/graph.py +2 -4
  131. tnfr/utils/init.py +31 -7
  132. tnfr/utils/init.pyi +4 -11
  133. tnfr/utils/io.py +313 -14
  134. tnfr/{helpers → utils}/numeric.py +50 -24
  135. tnfr/utils/numeric.pyi +21 -0
  136. tnfr/validation/__init__.py +92 -4
  137. tnfr/validation/__init__.pyi +77 -17
  138. tnfr/validation/compatibility.py +79 -43
  139. tnfr/validation/compatibility.pyi +4 -6
  140. tnfr/validation/grammar.py +55 -133
  141. tnfr/validation/grammar.pyi +37 -8
  142. tnfr/validation/graph.py +138 -0
  143. tnfr/validation/graph.pyi +17 -0
  144. tnfr/validation/rules.py +161 -74
  145. tnfr/validation/rules.pyi +55 -18
  146. tnfr/validation/runtime.py +263 -0
  147. tnfr/validation/runtime.pyi +31 -0
  148. tnfr/validation/soft_filters.py +170 -0
  149. tnfr/validation/soft_filters.pyi +37 -0
  150. tnfr/validation/spectral.py +159 -0
  151. tnfr/validation/spectral.pyi +46 -0
  152. tnfr/validation/syntax.py +28 -139
  153. tnfr/validation/syntax.pyi +7 -4
  154. tnfr/validation/window.py +39 -0
  155. tnfr/validation/window.pyi +1 -0
  156. tnfr/viz/__init__.py +9 -0
  157. tnfr/viz/matplotlib.py +246 -0
  158. {tnfr-6.0.0.dist-info → tnfr-7.0.0.dist-info}/METADATA +63 -19
  159. tnfr-7.0.0.dist-info/RECORD +185 -0
  160. {tnfr-6.0.0.dist-info → tnfr-7.0.0.dist-info}/licenses/LICENSE.md +1 -1
  161. tnfr/constants_glyphs.py +0 -16
  162. tnfr/constants_glyphs.pyi +0 -12
  163. tnfr/grammar.py +0 -25
  164. tnfr/grammar.pyi +0 -13
  165. tnfr/helpers/__init__.py +0 -151
  166. tnfr/helpers/__init__.pyi +0 -66
  167. tnfr/helpers/numeric.pyi +0 -12
  168. tnfr/presets.py +0 -15
  169. tnfr/presets.pyi +0 -7
  170. tnfr/utils/io.pyi +0 -10
  171. tnfr/utils/validators.py +0 -130
  172. tnfr/utils/validators.pyi +0 -19
  173. tnfr-6.0.0.dist-info/RECORD +0 -157
  174. {tnfr-6.0.0.dist-info → tnfr-7.0.0.dist-info}/WHEEL +0 -0
  175. {tnfr-6.0.0.dist-info → tnfr-7.0.0.dist-info}/entry_points.txt +0 -0
  176. {tnfr-6.0.0.dist-info → tnfr-7.0.0.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,170 @@
1
+ """Soft grammar filters harmonising canonical selector heuristics."""
2
+
3
+ from __future__ import annotations
4
+
5
+ from typing import TYPE_CHECKING, Any, Callable
6
+
7
+ from ..alias import get_attr
8
+ from ..constants.aliases import ALIAS_D2EPI
9
+ from ..glyph_history import recent_glyph
10
+ from ..types import Glyph
11
+ from ..utils import clamp01
12
+ from .rules import glyph_fallback, get_norm, normalized_dnfr
13
+
14
+ if TYPE_CHECKING: # pragma: no cover - import cycle guard
15
+ from collections.abc import Mapping
16
+ from ..operators.grammar import GrammarContext
17
+
18
+ __all__ = (
19
+ "acceleration_norm",
20
+ "check_repeats",
21
+ "maybe_force",
22
+ "soft_grammar_filters",
23
+ )
24
+
25
+
26
def acceleration_norm(ctx: "GrammarContext", nd: "Mapping[str, Any]") -> float:
    """Return the node acceleration normalised to ``[0, 1]``.

    The canonical ``accel_max`` bound stored in
    :class:`~tnfr.operators.grammar.GrammarContext` is used as the scale;
    anything beyond it is clamped so the score stays comparable with the
    ΔNFR-based heuristics.
    """

    bound = get_norm(ctx, "accel_max")
    magnitude = abs(get_attr(nd, ALIAS_D2EPI, 0.0))
    return clamp01(magnitude / bound)
36
def check_repeats(
    ctx: "GrammarContext", n: Any, cand: Glyph | str
) -> Glyph | str:
    """Swap ``cand`` when it breaches the configured repetition window.

    The soft grammar configuration in ``ctx.cfg_soft`` drives the check:

    * ``window`` — history length inspected via
      :func:`tnfr.glyph_history.recent_glyph`.
    * ``avoid_repeats`` — glyph codes that must not repeat in that window.
    * ``fallbacks`` — optional explicit replacements per avoided code,
      defaulting to the canonical fallbacks when unspecified.

    Raises ``RepeatWindowError`` when the repetition cannot be resolved by a
    distinct fallback glyph.
    """

    node_data = ctx.G.nodes[n]
    soft_cfg = ctx.cfg_soft
    window = int(soft_cfg.get("window", 0))
    banned = set(soft_cfg.get("avoid_repeats", []))
    substitutes = soft_cfg.get("fallbacks", {})
    candidate_code = cand.value if isinstance(cand, Glyph) else str(cand)

    repeated = (
        window > 0
        and candidate_code in banned
        and recent_glyph(node_data, candidate_code, window)
    )
    if not repeated:
        return cand

    replacement = glyph_fallback(candidate_code, substitutes)
    replacement_code = (
        replacement.value if isinstance(replacement, Glyph) else str(replacement)
    )
    if replacement_code != candidate_code:
        return replacement

    # No distinct fallback exists: reconstruct the recent glyph history so
    # the raised error can describe the offending order precisely.
    recent_codes: list[str] = []
    for entry in node_data.get("glyph_history", ()):
        if isinstance(entry, Glyph):
            recent_codes.append(entry.value)
            continue
        try:
            recent_codes.append(Glyph(str(entry)).value)
        except (TypeError, ValueError):
            # Unknown codes are kept verbatim rather than dropped.
            recent_codes.append(str(entry))
    sequence = (*recent_codes[-window:], candidate_code)

    # Imported lazily to avoid the module-level import cycle with
    # tnfr.operators.grammar.
    from ..operators import grammar as _grammar

    def as_structural(value: Glyph | str) -> str:
        # Map a glyph (or code) onto its structural function name, keeping
        # the raw code when no mapping is registered.
        default = value.value if isinstance(value, Glyph) else str(value)
        mapped = _grammar.glyph_function_name(value, default=default)
        return default if mapped is None else mapped

    candidate_name = as_structural(candidate_code)
    replacement_name = as_structural(replacement_code)

    raise _grammar.RepeatWindowError(
        rule="repeat-window",
        candidate=candidate_name,
        message=f"{candidate_name} repeats within window {window}",
        window=window,
        order=tuple(as_structural(item) for item in sequence),
        context={"fallback": replacement_name},
    )
91
+
92
+
93
def maybe_force(
    ctx: "GrammarContext",
    n: Any,
    cand: Glyph | str,
    original: Glyph | str,
    accessor: Callable[["GrammarContext", "Mapping[str, Any]"], float],
    key: str,
) -> Glyph | str:
    """Return ``original`` when ``accessor`` crosses the soft threshold ``key``.

    ``accessor`` receives the grammar context plus the node attributes. When
    its score reaches the configured threshold (``ctx.cfg_soft[key]``,
    defaulting to ``0.60``) the original candidate is restored, so soft
    filters never override high-confidence canonical choices.
    """

    # Nothing to force back when the candidate was never replaced.
    if cand == original:
        return cand
    threshold = float(ctx.cfg_soft.get(key, 0.60))
    score = accessor(ctx, ctx.G.nodes[n])
    return original if score >= threshold else cand
115
+
116
+
117
def _match_template(result: Glyph | str, template: Glyph | str) -> Glyph | str:
    """Coerce ``result`` to mirror ``template``'s representation when possible."""

    if isinstance(result, Glyph) and isinstance(template, str):
        # Template is a plain code: expose the glyph's code string.
        return result.value
    if isinstance(result, str) and isinstance(template, Glyph):
        try:
            return Glyph(result)
        except (TypeError, ValueError):
            # Not a known glyph code; keep the string untouched.
            return result
    return result
128
+
129
+
130
def soft_grammar_filters(
    ctx: "GrammarContext",
    n: Any,
    cand: Glyph | str,
    *,
    original: Glyph | str | None = None,
    template: Glyph | str | None = None,
) -> Glyph | str:
    """Apply the canonical soft grammar pipeline for ``cand``.

    Three ordered checks are performed:

    1. :func:`check_repeats` swaps recent repetitions for the configured
       fallback glyphs.
    2. :func:`maybe_force` with :func:`tnfr.validation.rules.normalized_dnfr`
       restores the original glyph when ΔNFR already exceeds ``force_dnfr``.
    3. :func:`maybe_force` with :func:`acceleration_norm` applies the same
       safeguard against the ``force_accel`` threshold.

    Parameters
    ----------
    ctx:
        Active grammar context providing node access and configuration.
    n:
        Node identifier inside ``ctx.G``.
    cand:
        Candidate glyph (``Glyph`` or code string) to validate softly.
    original:
        Optional anchor glyph for :func:`maybe_force`; defaults to ``cand``
        as seen before filtering.
    template:
        Optional value whose representation (``Glyph`` vs ``str``) the result
        mirrors; defaults to the incoming ``cand``.
    """

    reference = cand if original is None else original
    result = check_repeats(ctx, n, cand)
    result = maybe_force(ctx, n, result, reference, normalized_dnfr, "force_dnfr")
    result = maybe_force(ctx, n, result, reference, acceleration_norm, "force_accel")
    shape = cand if template is None else template
    return _match_template(result, shape)
@@ -0,0 +1,37 @@
1
# Type stubs for tnfr.validation.soft_filters.
from typing import Any, Callable, Mapping

from ..types import Glyph
from .grammar import GrammarContext

__all__ = (
    "acceleration_norm",
    "check_repeats",
    "maybe_force",
    "soft_grammar_filters",
)

def acceleration_norm(ctx: GrammarContext, nd: Mapping[str, Any]) -> float: ...
def check_repeats(ctx: GrammarContext, n: Any, cand: Glyph | str) -> Glyph | str: ...
def maybe_force(
    ctx: GrammarContext,
    n: Any,
    cand: Glyph | str,
    original: Glyph | str,
    accessor: Callable[[GrammarContext, Mapping[str, Any]], float],
    key: str,
) -> Glyph | str: ...
def soft_grammar_filters(
    ctx: GrammarContext,
    n: Any,
    cand: Glyph | str,
    *,
    original: Glyph | str | None = ...,
    template: Glyph | str | None = ...,
) -> Glyph | str: ...
@@ -0,0 +1,159 @@
1
+ """Spectral validation helpers aligned with the TNFR canonical interface."""
2
+
3
+ from __future__ import annotations
4
+
5
+ from dataclasses import dataclass
6
+ from typing import Any, Mapping, Sequence
7
+
8
+ import numpy as np
9
+
10
+ from ..mathematics.operators import CoherenceOperator, FrequencyOperator
11
+ from ..mathematics.spaces import HilbertSpace
12
+ from ..mathematics.runtime import (
13
+ coherence as runtime_coherence,
14
+ frequency_positive as runtime_frequency_positive,
15
+ normalized as runtime_normalized,
16
+ stable_unitary as runtime_stable_unitary,
17
+ )
18
+ from . import ValidationOutcome, Validator
19
+
20
+ __all__ = ("NFRValidator",)
21
+
22
+
23
@dataclass(slots=True)
class NFRValidator(Validator[np.ndarray]):
    """Validate spectral states against TNFR canonical invariants.

    A state passes when it is normalised within ``atol``, meets the
    coherence threshold, has positive frequency expectation (when a
    frequency operator is supplied), and stays normalised under the
    coherence operator's unitary evolution.
    """

    # Projection space used to coerce raw sequences into state vectors.
    hilbert_space: HilbertSpace
    coherence_operator: CoherenceOperator
    coherence_threshold: float
    frequency_operator: FrequencyOperator | None = None
    atol: float = 1e-9

    def _compute_summary(
        self,
        state: Sequence[complex] | np.ndarray,
        *,
        enforce_frequency_positivity: bool | None = None,
    ) -> tuple[bool, dict[str, Any], np.ndarray]:
        """Run every invariant check; return ``(passed, summary, unit vector)``.

        Raises ``ValueError`` for a null state vector or when frequency
        enforcement is requested without a frequency operator.
        """
        projected = self.hilbert_space.project(state)

        norm_ok, norm = runtime_normalized(
            projected, self.hilbert_space, atol=self.atol
        )
        if np.isclose(norm, 0.0, atol=self.atol):
            raise ValueError("Cannot normalise a null state vector.")
        unit_vector = projected / norm

        coherence_ok, coherence_value = runtime_coherence(
            unit_vector,
            self.coherence_operator,
            self.coherence_threshold,
            normalise=False,
            atol=self.atol,
        )

        frequency_info: dict[str, Any] | None = None
        frequency_ok = True
        if self.frequency_operator is not None:
            # Positivity is enforced by default whenever an operator exists.
            if enforce_frequency_positivity is None:
                enforce_frequency_positivity = True
            raw = runtime_frequency_positive(
                unit_vector,
                self.frequency_operator,
                normalise=False,
                enforce=enforce_frequency_positivity,
                atol=self.atol,
            )
            frequency_ok = bool(raw["passed"])
            # Re-key the runtime's "enforce" flag as "enforced" for the
            # public summary.
            frequency_info = {**raw, "enforced": raw["enforce"]}
            frequency_info.pop("enforce", None)
        elif enforce_frequency_positivity:
            raise ValueError("Frequency positivity enforcement requested without operator.")

        unitary_ok, post_norm = runtime_stable_unitary(
            unit_vector,
            self.coherence_operator,
            self.hilbert_space,
            normalise=False,
            atol=self.atol,
        )

        summary: dict[str, Any] = {
            "normalized": bool(norm_ok),
            "coherence": {
                "passed": bool(coherence_ok),
                "value": coherence_value,
                "threshold": self.coherence_threshold,
            },
            "frequency": frequency_info,
            "unitary_stability": {
                "passed": bool(unitary_ok),
                "norm_after": post_norm,
            },
        }

        passed = bool(norm_ok and coherence_ok and frequency_ok and unitary_ok)
        return passed, summary, unit_vector

    def validate(
        self,
        subject: Sequence[complex] | np.ndarray,
        /,
        *,
        enforce_frequency_positivity: bool | None = None,
    ) -> ValidationOutcome[np.ndarray]:
        """Return :class:`ValidationOutcome` for ``subject``."""

        passed, summary, unit_vector = self._compute_summary(
            subject, enforce_frequency_positivity=enforce_frequency_positivity
        )
        return ValidationOutcome(
            subject=unit_vector,
            passed=passed,
            summary=summary,
            artifacts={"normalised_state": unit_vector},
        )

    def validate_state(
        self,
        state: Sequence[complex] | np.ndarray,
        *,
        enforce_frequency_positivity: bool | None = None,
    ) -> tuple[bool, dict[str, Any]]:
        """Backward compatible validation returning ``(passed, summary)``."""

        passed, summary, _vector = self._compute_summary(
            state, enforce_frequency_positivity=enforce_frequency_positivity
        )
        return passed, summary

    def report(self, outcome: ValidationOutcome[np.ndarray]) -> str:
        """Return a human-readable report naming failed conditions."""

        summary = outcome.summary
        failures: list[str] = []

        if not summary.get("normalized", False):
            failures.append("normalization")
        if not summary.get("coherence", {}).get("passed", False):
            failures.append("coherence threshold")
        frequency_info = summary.get("frequency")
        if isinstance(frequency_info, Mapping) and not frequency_info.get("passed", False):
            failures.append("frequency positivity")
        if not summary.get("unitary_stability", {}).get("passed", False):
            failures.append("unitary stability")

        if failures:
            return "Failed checks: " + ", ".join(failures) + "."
        return "All validation checks passed."
159
+
@@ -0,0 +1,46 @@
1
# Type stubs for tnfr.validation.spectral.
from typing import Any, Mapping, Sequence

import numpy as np

from ..mathematics.operators import CoherenceOperator, FrequencyOperator
from ..mathematics.spaces import HilbertSpace
from . import ValidationOutcome, Validator

class NFRValidator(Validator[np.ndarray]):
    hilbert_space: HilbertSpace
    coherence_operator: CoherenceOperator
    coherence_threshold: float
    frequency_operator: FrequencyOperator | None
    atol: float

    def __init__(
        self,
        hilbert_space: HilbertSpace,
        coherence_operator: CoherenceOperator,
        *,
        coherence_threshold: float,
        frequency_operator: FrequencyOperator | None = ...,
        atol: float = ...,
    ) -> None: ...
    def validate(
        self,
        subject: Sequence[complex] | np.ndarray,
        /,
        *,
        enforce_frequency_positivity: bool | None = ...,
    ) -> ValidationOutcome[np.ndarray]: ...  # type: ignore[override]
    def validate_state(
        self,
        state: Sequence[complex] | np.ndarray,
        *,
        enforce_frequency_positivity: bool | None = ...,
    ) -> tuple[bool, Mapping[str, Any]]: ...
    def report(self, outcome: ValidationOutcome[np.ndarray]) -> str: ...

__all__ = ("NFRValidator",)
+
tnfr/validation/syntax.py CHANGED
@@ -1,151 +1,40 @@
1
- """Syntax validation for TNFR operator sequences."""
1
+ """Deprecated entry point for sequence validation.
2
+
3
+ Projects should import :func:`tnfr.operators.grammar.validate_sequence` directly;
4
+ this shim remains only for backwards compatibility and will be removed in a
5
+ future release.
6
+ """
2
7
 
3
8
  from __future__ import annotations
4
9
 
5
10
  from collections.abc import Iterable
11
+ from typing import Any
12
+ import warnings
6
13
 
7
- from ..operators.registry import OPERATORS
8
- from ..config.operator_names import (
9
- COHERENCE,
10
- INTERMEDIATE_OPERATORS,
11
- RECEPTION,
12
- SELF_ORGANIZATION,
13
- SELF_ORGANIZATION_CLOSURES,
14
- VALID_END_OPERATORS,
15
- VALID_START_OPERATORS,
16
- canonical_operator_name,
17
- operator_display_name,
18
- )
14
+ from . import ValidationOutcome
15
+ from ..operators import grammar as _canonical
16
+ from ..operators.grammar import validate_sequence as _validate_sequence
19
17
 
20
18
  __all__ = ("validate_sequence",)
21
19
 
22
-
23
- _MISSING = object()
24
-
25
-
26
- _CANONICAL_START = tuple(sorted(VALID_START_OPERATORS))
27
- _CANONICAL_INTERMEDIATE = tuple(sorted(INTERMEDIATE_OPERATORS))
28
- _CANONICAL_END = tuple(sorted(VALID_END_OPERATORS))
29
-
30
-
31
- def _format_token_group(tokens: tuple[str, ...]) -> str:
32
- return ", ".join(operator_display_name(token) for token in sorted(tokens))
33
-
34
-
35
- def _validate_start(token: str) -> tuple[bool, str]:
36
- """Ensure the sequence begins with a valid structural operator."""
37
-
38
- if not isinstance(token, str):
39
- return False, "tokens must be str"
40
- if token not in VALID_START_OPERATORS:
41
- valid_tokens = _format_token_group(_CANONICAL_START)
42
- return False, f"must start with {valid_tokens}"
43
- return True, ""
44
-
45
-
46
- def _validate_intermediate(
47
- found_reception: bool, found_coherence: bool, seen_intermediate: bool
48
- ) -> tuple[bool, str]:
49
- """Check that the central TNFR segment is present."""
50
-
51
- if not (found_reception and found_coherence):
52
- return False, f"missing {RECEPTION}→{COHERENCE} segment"
53
- if not seen_intermediate:
54
- intermediate_tokens = _format_token_group(_CANONICAL_INTERMEDIATE)
55
- return False, f"missing {intermediate_tokens} segment"
56
- return True, ""
57
-
58
-
59
- def _validate_end(last_token: str, open_thol: bool) -> tuple[bool, str]:
60
- """Validate closing operator and any pending THOL blocks."""
61
-
62
- if last_token not in VALID_END_OPERATORS:
63
- cierre_tokens = _format_token_group(_CANONICAL_END)
64
- return False, f"sequence must end with {cierre_tokens}"
65
- if open_thol:
66
- return False, "THOL block without closure"
67
- return True, ""
68
-
69
-
70
- def _validate_known_tokens(
71
- token_to_canonical: dict[str, str]
72
- ) -> tuple[bool, str]:
73
- """Ensure all tokens map to canonical operators."""
74
-
75
- unknown_tokens = {
76
- alias
77
- for alias, canonical in token_to_canonical.items()
78
- if canonical not in OPERATORS
79
- }
80
- if unknown_tokens:
81
- ordered = ", ".join(sorted(unknown_tokens))
82
- return False, f"unknown tokens: {ordered}"
83
- return True, ""
84
-
85
-
86
- def _validate_token_sequence(names: list[str]) -> tuple[bool, str]:
87
- """Validate token format and logical coherence in one pass."""
88
-
89
- if not names:
90
- return False, "empty sequence"
91
-
92
- ok, msg = _validate_start(names[0])
93
- if not ok:
94
- return False, msg
95
-
96
- token_to_canonical: dict[str, str] = {}
97
- found_reception = False
98
- found_coherence = False
99
- seen_intermediate = False
100
- open_thol = False
101
-
102
- for name in names:
103
- if not isinstance(name, str):
104
- return False, "tokens must be str"
105
- canonical = canonical_operator_name(name)
106
- token_to_canonical[name] = canonical
107
-
108
- if canonical == RECEPTION and not found_reception:
109
- found_reception = True
110
- elif found_reception and canonical == COHERENCE and not found_coherence:
111
- found_coherence = True
112
- elif (
113
- found_coherence
114
- and not seen_intermediate
115
- and canonical in INTERMEDIATE_OPERATORS
116
- ):
117
- seen_intermediate = True
118
-
119
- if canonical == SELF_ORGANIZATION:
120
- open_thol = True
121
- elif open_thol and canonical in SELF_ORGANIZATION_CLOSURES:
122
- open_thol = False
123
-
124
- ok, msg = _validate_known_tokens(token_to_canonical)
125
- if not ok:
126
- return False, msg
127
- ok, msg = _validate_intermediate(found_reception, found_coherence, seen_intermediate)
128
- if not ok:
129
- return False, msg
130
- ok, msg = _validate_end(names[-1], open_thol)
131
- if not ok:
132
- return False, msg
133
- return True, "ok"
20
+ warnings.warn(
21
+ "'tnfr.validation.syntax' is deprecated; use 'tnfr.operators.grammar' "
22
+ "for sequence validation.",
23
+ DeprecationWarning,
24
+ stacklevel=2,
25
+ )
134
26
 
135
27
 
136
28
def validate_sequence(
    names: Iterable[str] | object = _canonical._MISSING, **kwargs: Any
) -> ValidationOutcome[tuple[str, ...]]:
    """Proxy to :func:`tnfr.operators.grammar.validate_sequence`.

    Emits a :class:`DeprecationWarning` on every call before delegating;
    arguments and the returned outcome are forwarded untouched.
    """

    warnings.warn(
        "'tnfr.validation.syntax.validate_sequence' is deprecated; "
        "use 'tnfr.operators.grammar.validate_sequence' instead.",
        DeprecationWarning,
        stacklevel=2,
    )
    # The canonical module's sentinel preserves the "missing argument"
    # behaviour of the delegate.
    return _validate_sequence(names, **kwargs)
@@ -1,7 +1,10 @@
1
# Type stubs for the deprecated tnfr.validation.syntax shim.
from collections.abc import Iterable

from . import ValidationOutcome

__all__ = ("validate_sequence",)

def validate_sequence(
    names: Iterable[str] | object = ..., **kwargs: object
) -> ValidationOutcome[tuple[str, ...]]: ...
@@ -0,0 +1,39 @@
1
+ """Primitive window validation helpers without heavy dependencies."""
2
+
3
+ from __future__ import annotations
4
+
5
+ import numbers
6
+
7
+ __all__ = ["validate_window"]
8
+
9
+
10
def validate_window(window: int, *, positive: bool = False) -> int:
    """Validate ``window`` as an integer and return it.

    Parameters
    ----------
    window:
        Value to coerce into an integer window size.
    positive:
        When ``True`` the window must be strictly greater than zero;
        otherwise zero is accepted. Negative values are never permitted.

    Returns
    -------
    int
        The validated integer window size.

    Raises
    ------
    TypeError
        If ``window`` is not an integer value.
    ValueError
        If ``window`` violates the positivity constraint.
    """

    # bool is an Integral subtype, so reject it explicitly first.
    is_integral = isinstance(window, numbers.Integral) and not isinstance(window, bool)
    if not is_integral:
        raise TypeError("'window' must be an integer")

    minimum = 1 if positive else 0
    if window < minimum:
        kind = "positive" if positive else "non-negative"
        raise ValueError(f"'window'={window} must be {kind}")
    return int(window)
@@ -0,0 +1 @@
1
# Type stub for tnfr.validation.window.
def validate_window(window: int, *, positive: bool = ...) -> int: ...
tnfr/viz/__init__.py ADDED
@@ -0,0 +1,9 @@
1
"""Visualization helpers for TNFR telemetry."""

# Re-export the matplotlib-backed plotting entry points.
from .matplotlib import (
    plot_coherence_matrix,
    plot_phase_sync,
    plot_spectrum_path,
)

__all__ = [
    "plot_coherence_matrix",
    "plot_phase_sync",
    "plot_spectrum_path",
]
+ ]