tnfr 6.0.0-py3-none-any.whl → 7.0.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of tnfr might be problematic.

Files changed (176)
  1. tnfr/__init__.py +50 -5
  2. tnfr/__init__.pyi +0 -7
  3. tnfr/_compat.py +0 -1
  4. tnfr/_generated_version.py +34 -0
  5. tnfr/_version.py +44 -2
  6. tnfr/alias.py +14 -13
  7. tnfr/alias.pyi +5 -37
  8. tnfr/cache.py +9 -729
  9. tnfr/cache.pyi +8 -224
  10. tnfr/callback_utils.py +16 -31
  11. tnfr/callback_utils.pyi +3 -29
  12. tnfr/cli/__init__.py +17 -11
  13. tnfr/cli/__init__.pyi +0 -21
  14. tnfr/cli/arguments.py +175 -14
  15. tnfr/cli/arguments.pyi +5 -11
  16. tnfr/cli/execution.py +434 -48
  17. tnfr/cli/execution.pyi +14 -24
  18. tnfr/cli/utils.py +20 -3
  19. tnfr/cli/utils.pyi +5 -5
  20. tnfr/config/__init__.py +2 -1
  21. tnfr/config/__init__.pyi +2 -0
  22. tnfr/config/feature_flags.py +83 -0
  23. tnfr/config/init.py +1 -1
  24. tnfr/config/operator_names.py +1 -14
  25. tnfr/config/presets.py +6 -26
  26. tnfr/constants/__init__.py +10 -13
  27. tnfr/constants/__init__.pyi +10 -22
  28. tnfr/constants/aliases.py +31 -0
  29. tnfr/constants/core.py +4 -3
  30. tnfr/constants/init.py +1 -1
  31. tnfr/constants/metric.py +3 -3
  32. tnfr/dynamics/__init__.py +64 -10
  33. tnfr/dynamics/__init__.pyi +3 -4
  34. tnfr/dynamics/adaptation.py +79 -13
  35. tnfr/dynamics/aliases.py +10 -9
  36. tnfr/dynamics/coordination.py +77 -35
  37. tnfr/dynamics/dnfr.py +575 -274
  38. tnfr/dynamics/dnfr.pyi +1 -10
  39. tnfr/dynamics/integrators.py +47 -33
  40. tnfr/dynamics/integrators.pyi +0 -1
  41. tnfr/dynamics/runtime.py +489 -129
  42. tnfr/dynamics/sampling.py +2 -0
  43. tnfr/dynamics/selectors.py +101 -62
  44. tnfr/execution.py +15 -8
  45. tnfr/execution.pyi +5 -25
  46. tnfr/flatten.py +7 -3
  47. tnfr/flatten.pyi +1 -8
  48. tnfr/gamma.py +22 -26
  49. tnfr/gamma.pyi +0 -6
  50. tnfr/glyph_history.py +37 -26
  51. tnfr/glyph_history.pyi +1 -19
  52. tnfr/glyph_runtime.py +16 -0
  53. tnfr/glyph_runtime.pyi +9 -0
  54. tnfr/immutable.py +20 -15
  55. tnfr/immutable.pyi +4 -7
  56. tnfr/initialization.py +5 -7
  57. tnfr/initialization.pyi +1 -9
  58. tnfr/io.py +6 -305
  59. tnfr/io.pyi +13 -8
  60. tnfr/mathematics/__init__.py +81 -0
  61. tnfr/mathematics/backend.py +426 -0
  62. tnfr/mathematics/dynamics.py +398 -0
  63. tnfr/mathematics/epi.py +254 -0
  64. tnfr/mathematics/generators.py +222 -0
  65. tnfr/mathematics/metrics.py +119 -0
  66. tnfr/mathematics/operators.py +233 -0
  67. tnfr/mathematics/operators_factory.py +71 -0
  68. tnfr/mathematics/projection.py +78 -0
  69. tnfr/mathematics/runtime.py +173 -0
  70. tnfr/mathematics/spaces.py +247 -0
  71. tnfr/mathematics/transforms.py +292 -0
  72. tnfr/metrics/__init__.py +10 -10
  73. tnfr/metrics/coherence.py +123 -94
  74. tnfr/metrics/common.py +22 -13
  75. tnfr/metrics/common.pyi +42 -11
  76. tnfr/metrics/core.py +72 -14
  77. tnfr/metrics/diagnosis.py +48 -57
  78. tnfr/metrics/diagnosis.pyi +3 -7
  79. tnfr/metrics/export.py +3 -5
  80. tnfr/metrics/glyph_timing.py +41 -31
  81. tnfr/metrics/reporting.py +13 -6
  82. tnfr/metrics/sense_index.py +884 -114
  83. tnfr/metrics/trig.py +167 -11
  84. tnfr/metrics/trig.pyi +1 -0
  85. tnfr/metrics/trig_cache.py +112 -15
  86. tnfr/node.py +400 -17
  87. tnfr/node.pyi +55 -38
  88. tnfr/observers.py +111 -8
  89. tnfr/observers.pyi +0 -15
  90. tnfr/ontosim.py +9 -6
  91. tnfr/ontosim.pyi +0 -5
  92. tnfr/operators/__init__.py +529 -42
  93. tnfr/operators/__init__.pyi +14 -0
  94. tnfr/operators/definitions.py +350 -18
  95. tnfr/operators/definitions.pyi +0 -14
  96. tnfr/operators/grammar.py +760 -0
  97. tnfr/operators/jitter.py +28 -22
  98. tnfr/operators/registry.py +7 -12
  99. tnfr/operators/registry.pyi +0 -2
  100. tnfr/operators/remesh.py +38 -61
  101. tnfr/rng.py +17 -300
  102. tnfr/schemas/__init__.py +8 -0
  103. tnfr/schemas/grammar.json +94 -0
  104. tnfr/selector.py +3 -4
  105. tnfr/selector.pyi +1 -1
  106. tnfr/sense.py +22 -24
  107. tnfr/sense.pyi +0 -7
  108. tnfr/structural.py +504 -21
  109. tnfr/structural.pyi +41 -18
  110. tnfr/telemetry/__init__.py +23 -1
  111. tnfr/telemetry/cache_metrics.py +226 -0
  112. tnfr/telemetry/nu_f.py +423 -0
  113. tnfr/telemetry/nu_f.pyi +123 -0
  114. tnfr/tokens.py +1 -4
  115. tnfr/tokens.pyi +1 -6
  116. tnfr/trace.py +20 -53
  117. tnfr/trace.pyi +9 -37
  118. tnfr/types.py +244 -15
  119. tnfr/types.pyi +200 -14
  120. tnfr/units.py +69 -0
  121. tnfr/units.pyi +16 -0
  122. tnfr/utils/__init__.py +107 -48
  123. tnfr/utils/__init__.pyi +80 -11
  124. tnfr/utils/cache.py +1705 -65
  125. tnfr/utils/cache.pyi +370 -58
  126. tnfr/utils/chunks.py +104 -0
  127. tnfr/utils/chunks.pyi +21 -0
  128. tnfr/utils/data.py +95 -5
  129. tnfr/utils/data.pyi +8 -17
  130. tnfr/utils/graph.py +2 -4
  131. tnfr/utils/init.py +31 -7
  132. tnfr/utils/init.pyi +4 -11
  133. tnfr/utils/io.py +313 -14
  134. tnfr/{helpers → utils}/numeric.py +50 -24
  135. tnfr/utils/numeric.pyi +21 -0
  136. tnfr/validation/__init__.py +92 -4
  137. tnfr/validation/__init__.pyi +77 -17
  138. tnfr/validation/compatibility.py +79 -43
  139. tnfr/validation/compatibility.pyi +4 -6
  140. tnfr/validation/grammar.py +55 -133
  141. tnfr/validation/grammar.pyi +37 -8
  142. tnfr/validation/graph.py +138 -0
  143. tnfr/validation/graph.pyi +17 -0
  144. tnfr/validation/rules.py +161 -74
  145. tnfr/validation/rules.pyi +55 -18
  146. tnfr/validation/runtime.py +263 -0
  147. tnfr/validation/runtime.pyi +31 -0
  148. tnfr/validation/soft_filters.py +170 -0
  149. tnfr/validation/soft_filters.pyi +37 -0
  150. tnfr/validation/spectral.py +159 -0
  151. tnfr/validation/spectral.pyi +46 -0
  152. tnfr/validation/syntax.py +28 -139
  153. tnfr/validation/syntax.pyi +7 -4
  154. tnfr/validation/window.py +39 -0
  155. tnfr/validation/window.pyi +1 -0
  156. tnfr/viz/__init__.py +9 -0
  157. tnfr/viz/matplotlib.py +246 -0
  158. {tnfr-6.0.0.dist-info → tnfr-7.0.0.dist-info}/METADATA +63 -19
  159. tnfr-7.0.0.dist-info/RECORD +185 -0
  160. {tnfr-6.0.0.dist-info → tnfr-7.0.0.dist-info}/licenses/LICENSE.md +1 -1
  161. tnfr/constants_glyphs.py +0 -16
  162. tnfr/constants_glyphs.pyi +0 -12
  163. tnfr/grammar.py +0 -25
  164. tnfr/grammar.pyi +0 -13
  165. tnfr/helpers/__init__.py +0 -151
  166. tnfr/helpers/__init__.pyi +0 -66
  167. tnfr/helpers/numeric.pyi +0 -12
  168. tnfr/presets.py +0 -15
  169. tnfr/presets.pyi +0 -7
  170. tnfr/utils/io.pyi +0 -10
  171. tnfr/utils/validators.py +0 -130
  172. tnfr/utils/validators.pyi +0 -19
  173. tnfr-6.0.0.dist-info/RECORD +0 -157
  174. {tnfr-6.0.0.dist-info → tnfr-7.0.0.dist-info}/WHEEL +0 -0
  175. {tnfr-6.0.0.dist-info → tnfr-7.0.0.dist-info}/entry_points.txt +0 -0
  176. {tnfr-6.0.0.dist-info → tnfr-7.0.0.dist-info}/top_level.txt +0 -0
tnfr/cli/execution.py CHANGED
@@ -1,47 +1,69 @@
+"""CLI execution helpers for running canonical TNFR programs."""
+
 from __future__ import annotations

 import argparse
-
+from collections import deque
+from collections.abc import Mapping
 from copy import deepcopy
+from importlib import import_module
 from pathlib import Path
-from typing import Any, Optional
+from typing import Any, Optional, Sequence

 import networkx as nx
+import numpy as np

-from ..constants import METRIC_DEFAULTS
-from ..sense import register_sigma_callback
-from ..metrics import (
-    register_metrics_callbacks,
-    glyph_top,
-    export_metrics,
-    build_metrics_summary,
-)
-from ..metrics.core import _metrics_step
-from ..trace import register_trace
-from ..execution import CANONICAL_PRESET_NAME, play
-from ..dynamics import (
-    run,
-    default_glyph_selector,
-    parametric_glyph_selector,
-    validate_canon,
-)
+from ..config import apply_config
 from ..config.presets import (
     PREFERRED_PRESET_NAMES,
     get_preset,
-    legacy_preset_guidance,
 )
-from ..config import apply_config
-from ..io import read_structured_file, safe_write, StructuredFileError
+from ..alias import get_attr
+from ..constants import METRIC_DEFAULTS, VF_PRIMARY, get_aliases, get_param
+from ..dynamics import default_glyph_selector, parametric_glyph_selector, run
+from ..execution import CANONICAL_PRESET_NAME, play
+from ..flatten import parse_program_tokens
 from ..glyph_history import ensure_history
+from ..mathematics import (
+    BasicStateProjector,
+    CoherenceOperator,
+    FrequencyOperator,
+    HilbertSpace,
+    MathematicalDynamicsEngine,
+    NFRValidator,
+    make_coherence_operator,
+    make_frequency_operator,
+)
+from ..metrics import (
+    build_metrics_summary,
+    export_metrics,
+    glyph_top,
+    register_metrics_callbacks,
+)
+from ..metrics.core import _metrics_step
 from ..ontosim import prepare_network
+from ..sense import register_sigma_callback
+from ..trace import register_trace
 from ..types import ProgramTokens
-from ..utils import get_logger, json_dumps
-from ..flatten import parse_program_tokens
-
+from ..utils import (
+    StructuredFileError,
+    clamp01,
+    get_logger,
+    json_dumps,
+    read_structured_file,
+    safe_write,
+)
 from .arguments import _args_to_dict
+from .utils import _parse_cli_variants
+from ..validation import validate_canon

 logger = get_logger(__name__)

+_VF_ALIASES = get_aliases("VF")
+VF_ALIAS_KEYS: tuple[str, ...] = (VF_PRIMARY,) + tuple(
+    alias for alias in _VF_ALIASES if alias != VF_PRIMARY
+)
+


 # CLI summaries should remain concise by default while allowing callers to
@@ -59,11 +81,17 @@ def _attach_callbacks(G: "nx.Graph") -> None:
     register_sigma_callback(G)
     register_metrics_callbacks(G)
     register_trace(G)
+    history = ensure_history(G)
+    maxlen = int(get_param(G, "PROGRAM_TRACE_MAXLEN"))
+    history.setdefault("program_trace", deque(maxlen=maxlen))
+    history.setdefault("trace_meta", [])
     _metrics_step(G, ctx=None)


 def _persist_history(G: "nx.Graph", args: argparse.Namespace) -> None:
-    if getattr(args, "save_history", None) or getattr(args, "export_history_base", None):
+    if getattr(args, "save_history", None) or getattr(
+        args, "export_history_base", None
+    ):
         history = ensure_history(G)
         if getattr(args, "save_history", None):
             _save_json(args.save_history, history)
@@ -71,7 +99,155 @@ def _persist_history(G: "nx.Graph", args: argparse.Namespace) -> None:
             export_metrics(G, args.export_history_base, fmt=args.export_format)


+def _to_float_array(values: Sequence[float] | None, *, name: str) -> np.ndarray | None:
+    if values is None:
+        return None
+    array = np.asarray(list(values), dtype=float)
+    if array.ndim != 1:
+        raise ValueError(f"{name} must be a one-dimensional sequence of numbers")
+    return array
+
+
+def _resolve_math_dimension(
+    args: argparse.Namespace, fallback: int
+) -> int:
+    dimension = getattr(args, "math_dimension", None)
+    candidate_lengths: list[int] = []
+    for attr in (
+        "math_coherence_spectrum",
+        "math_frequency_diagonal",
+        "math_generator_diagonal",
+    ):
+        seq = getattr(args, attr, None)
+        if seq is not None:
+            candidate_lengths.append(len(seq))
+    if dimension is None:
+        if candidate_lengths:
+            unique = set(candidate_lengths)
+            if len(unique) > 1:
+                raise ValueError(
+                    "Math engine configuration requires matching sequence lengths"
+                )
+            dimension = unique.pop()
+        else:
+            dimension = fallback
+    else:
+        for length in candidate_lengths:
+            if length != dimension:
+                raise ValueError(
+                    "Math engine sequence lengths must match the requested dimension"
+                )
+    if dimension is None or dimension <= 0:
+        raise ValueError("Hilbert space dimension must be a positive integer")
+    return int(dimension)
+
+
+def _build_math_engine_config(
+    G: "nx.Graph", args: argparse.Namespace
+) -> dict[str, Any]:
+    node_count = G.number_of_nodes() if hasattr(G, "number_of_nodes") else len(list(G.nodes))
+    fallback_dim = max(1, int(node_count) if node_count is not None else 1)
+    dimension = _resolve_math_dimension(args, fallback=fallback_dim)
+
+    coherence_spectrum = _to_float_array(
+        getattr(args, "math_coherence_spectrum", None),
+        name="--math-coherence-spectrum",
+    )
+    if coherence_spectrum is not None and coherence_spectrum.size != dimension:
+        raise ValueError("Coherence spectrum length must equal the Hilbert dimension")
+
+    frequency_diagonal = _to_float_array(
+        getattr(args, "math_frequency_diagonal", None),
+        name="--math-frequency-diagonal",
+    )
+    if frequency_diagonal is not None and frequency_diagonal.size != dimension:
+        raise ValueError("Frequency diagonal length must equal the Hilbert dimension")
+
+    generator_diagonal = _to_float_array(
+        getattr(args, "math_generator_diagonal", None),
+        name="--math-generator-diagonal",
+    )
+    if generator_diagonal is not None and generator_diagonal.size != dimension:
+        raise ValueError("Generator diagonal length must equal the Hilbert dimension")
+
+    coherence_c_min = getattr(args, "math_coherence_c_min", None)
+    if coherence_spectrum is None:
+        coherence_operator = make_coherence_operator(
+            dimension,
+            c_min=float(coherence_c_min) if coherence_c_min is not None else 0.1,
+        )
+    else:
+        if coherence_c_min is not None:
+            coherence_operator = CoherenceOperator(
+                coherence_spectrum, c_min=float(coherence_c_min)
+            )
+        else:
+            coherence_operator = CoherenceOperator(coherence_spectrum)
+        if not coherence_operator.is_positive_semidefinite():
+            raise ValueError("Coherence spectrum must be positive semidefinite")
+
+    frequency_matrix: np.ndarray
+    if frequency_diagonal is None:
+        frequency_matrix = np.eye(dimension, dtype=float)
+    else:
+        frequency_matrix = np.diag(frequency_diagonal)
+    frequency_operator = make_frequency_operator(frequency_matrix)
+
+    generator_matrix: np.ndarray
+    if generator_diagonal is None:
+        generator_matrix = np.zeros((dimension, dimension), dtype=float)
+    else:
+        generator_matrix = np.diag(generator_diagonal)
+
+    hilbert_space = HilbertSpace(dimension)
+    dynamics_engine = MathematicalDynamicsEngine(
+        generator_matrix,
+        hilbert_space=hilbert_space,
+    )
+
+    coherence_threshold = getattr(args, "math_coherence_threshold", None)
+    if coherence_threshold is None:
+        coherence_threshold = float(coherence_operator.c_min)
+    else:
+        coherence_threshold = float(coherence_threshold)
+
+    state_projector = BasicStateProjector()
+    validator = NFRValidator(
+        hilbert_space,
+        coherence_operator,
+        coherence_threshold,
+        frequency_operator=frequency_operator,
+    )
+
+    return {
+        "enabled": True,
+        "dimension": dimension,
+        "hilbert_space": hilbert_space,
+        "coherence_operator": coherence_operator,
+        "frequency_operator": frequency_operator,
+        "coherence_threshold": coherence_threshold,
+        "state_projector": state_projector,
+        "validator": validator,
+        "dynamics_engine": dynamics_engine,
+        "generator_matrix": generator_matrix,
+    }
+
+
+def _configure_math_engine(G: "nx.Graph", args: argparse.Namespace) -> None:
+    if not getattr(args, "math_engine", False):
+        G.graph.pop("MATH_ENGINE", None)
+        return
+    try:
+        config = _build_math_engine_config(G, args)
+    except ValueError as exc:
+        logger.error("Math engine configuration error: %s", exc)
+        raise SystemExit(1) from exc
+    G.graph["MATH_ENGINE"] = config
+
+
 def build_basic_graph(args: argparse.Namespace) -> "nx.Graph":
+    """Construct the base graph topology described by CLI ``args``."""
+
     n = args.nodes
     topology = getattr(args, "topology", "ring").lower()
     seed = getattr(args, "seed", None)
@@ -87,7 +263,7 @@ def build_basic_graph(args: argparse.Namespace) -> "nx.Graph":
         fallback = 0.0
     else:
         fallback = 3.0 / n
-    prob = min(max(fallback, 0.0), 1.0)
+    prob = clamp01(fallback)
     if not 0.0 <= prob <= 1.0:
         raise ValueError(f"p must be between 0 and 1; received {prob}")
     G = nx.gnp_random_graph(n, prob, seed=seed)
@@ -101,8 +277,14 @@ def build_basic_graph(args: argparse.Namespace) -> "nx.Graph":


 def apply_cli_config(G: "nx.Graph", args: argparse.Namespace) -> None:
+    """Apply CLI overrides from ``args`` to graph-level configuration."""
+
     if args.config:
-        apply_config(G, Path(args.config))
+        try:
+            apply_config(G, Path(args.config))
+        except (StructuredFileError, ValueError) as exc:
+            logger.error("%s", exc)
+            raise SystemExit(1) from exc
     arg_map = {
         "dt": ("DT", float),
         "integrator": ("INTEGRATOR_METHOD", str),
@@ -114,8 +296,18 @@ def apply_cli_config(G: "nx.Graph", args: argparse.Namespace) -> None:
         if val is not None:
             G.graph[key] = conv(val)

+    base_gcanon: dict[str, Any]
+    existing_gcanon = G.graph.get("GRAMMAR_CANON")
+    if isinstance(existing_gcanon, Mapping):
+        base_gcanon = {
+            **METRIC_DEFAULTS["GRAMMAR_CANON"],
+            **dict(existing_gcanon),
+        }
+    else:
+        base_gcanon = dict(METRIC_DEFAULTS["GRAMMAR_CANON"])
+
     gcanon = {
-        **METRIC_DEFAULTS["GRAMMAR_CANON"],
+        **base_gcanon,
         **_args_to_dict(args, prefix="grammar_"),
     }
     if getattr(args, "grammar_canon", None) is not None:
@@ -128,9 +320,7 @@ def apply_cli_config(G: "nx.Graph", args: argparse.Namespace) -> None:
         "basic": default_glyph_selector,
         "param": parametric_glyph_selector,
     }
-    G.graph["glyph_selector"] = sel_map.get(
-        selector, default_glyph_selector
-    )
+    G.graph["glyph_selector"] = sel_map.get(selector, default_glyph_selector)

     if hasattr(args, "gamma_type"):
         G.graph["GAMMA"] = {
@@ -151,8 +341,29 @@ def apply_cli_config(G: "nx.Graph", args: argparse.Namespace) -> None:
     if value is not None:
         cfg["verbosity"] = value

+    candidate_count = getattr(args, "um_candidate_count", None)
+    if candidate_count is not None:
+        G.graph["UM_CANDIDATE_COUNT"] = int(candidate_count)
+
+    stop_window = getattr(args, "stop_early_window", None)
+    stop_fraction = getattr(args, "stop_early_fraction", None)
+    if stop_window is not None or stop_fraction is not None:
+        stop_cfg = G.graph.get("STOP_EARLY")
+        if isinstance(stop_cfg, Mapping):
+            next_cfg = {**stop_cfg}
+        else:
+            next_cfg = deepcopy(METRIC_DEFAULTS["STOP_EARLY"])
+        if stop_window is not None:
+            next_cfg["window"] = int(stop_window)
+        if stop_fraction is not None:
+            next_cfg["fraction"] = float(stop_fraction)
+        next_cfg.setdefault("enabled", True)
+        G.graph["STOP_EARLY"] = next_cfg
+

 def register_callbacks_and_observer(G: "nx.Graph") -> None:
+    """Attach callbacks and validators required for CLI runs."""
+
     _attach_callbacks(G)
     validate_canon(G)

@@ -164,6 +375,7 @@ def _build_graph_from_args(args: argparse.Namespace) -> "nx.Graph":
     G.graph["ATTACH_STD_OBSERVER"] = True
     prepare_network(G)
     register_callbacks_and_observer(G)
+    _configure_math_engine(G, args)
     return G


@@ -177,25 +389,21 @@ def _load_sequence(path: Path) -> ProgramTokens:
         message = str(StructuredFileError(path, exc))
         logger.error("%s", message)
         raise SystemExit(1) from exc
+    if isinstance(data, Mapping) and "sequence" in data:
+        data = data["sequence"]
     return parse_program_tokens(data)


 def resolve_program(
     args: argparse.Namespace, default: Optional[ProgramTokens] = None
 ) -> Optional[ProgramTokens]:
+    """Resolve preset/sequence inputs into program tokens."""
+
     if getattr(args, "preset", None):
         try:
             return get_preset(args.preset)
         except KeyError as exc:
-            guidance = legacy_preset_guidance(args.preset)
-            if guidance is not None:
-                details = guidance
-            else:
-                details = (
-                    exc.args[0]
-                    if exc.args
-                    else "Legacy preset identifier rejected."
-                )
+            details = exc.args[0] if exc.args else "Preset lookup failed."
             logger.error(
                 (
                     "Unknown preset '%s'. Available presets: %s. %s "
@@ -216,6 +424,8 @@ def run_program(
     program: Optional[ProgramTokens],
     args: argparse.Namespace,
 ) -> "nx.Graph":
+    """Execute ``program`` (or timed run) on ``G`` using CLI options."""
+
     if G is None:
         G = _build_graph_from_args(args)

@@ -231,6 +441,13 @@ def run_program(
             if value is not None:
                 run_kwargs[attr] = value

+        job_overrides: dict[str, Any] = {}
+        dnfr_jobs = getattr(args, "dnfr_n_jobs", None)
+        if dnfr_jobs is not None:
+            job_overrides["dnfr_n_jobs"] = int(dnfr_jobs)
+        if job_overrides:
+            run_kwargs["n_jobs"] = job_overrides
+
         run(G, steps=steps, **run_kwargs)
     else:
         play(G, program)
@@ -251,10 +468,118 @@ def _run_cli_program(
         code = exc.code if isinstance(exc.code, int) else 1
         return code or 1, None

-    result_graph = run_program(graph, program, args)
+    try:
+        result_graph = run_program(graph, program, args)
+    except SystemExit as exc:
+        code = exc.code if isinstance(exc.code, int) else 1
+        return code or 1, None
     return 0, result_graph


+def _log_math_engine_summary(G: "nx.Graph") -> None:
+    math_cfg = G.graph.get("MATH_ENGINE")
+    if not isinstance(math_cfg, Mapping) or not math_cfg.get("enabled"):
+        return
+
+    nodes = list(G.nodes)
+    if not nodes:
+        logger.info("[MATH] Math engine validation skipped: no nodes present")
+        return
+
+    hilbert_space: HilbertSpace = math_cfg["hilbert_space"]
+    coherence_operator: CoherenceOperator = math_cfg["coherence_operator"]
+    frequency_operator: FrequencyOperator | None = math_cfg.get("frequency_operator")
+    state_projector: BasicStateProjector = math_cfg.get(
+        "state_projector", BasicStateProjector()
+    )
+    validator: NFRValidator | None = math_cfg.get("validator")
+    if validator is None:
+        coherence_threshold = math_cfg.get("coherence_threshold")
+        validator = NFRValidator(
+            hilbert_space,
+            coherence_operator,
+            float(coherence_threshold) if coherence_threshold is not None else 0.0,
+            frequency_operator=frequency_operator,
+        )
+        math_cfg["validator"] = validator
+
+    enforce_frequency = bool(frequency_operator is not None)
+
+    norm_values: list[float] = []
+    normalized_flags: list[bool] = []
+    coherence_flags: list[bool] = []
+    coherence_values: list[float] = []
+    coherence_threshold: float | None = None
+    frequency_flags: list[bool] = []
+    frequency_values: list[float] = []
+    frequency_spectrum_min: float | None = None
+
+    for node_id in nodes:
+        data = G.nodes[node_id]
+        epi = float(data.get("EPI", 0.0))
+        nu_f = float(
+            get_attr(
+                data,
+                VF_ALIAS_KEYS,
+                default=float(data.get(VF_PRIMARY, 0.0)),
+            )
+        )
+        theta = float(data.get("theta", 0.0))
+        state = state_projector(epi=epi, nu_f=nu_f, theta=theta, dim=hilbert_space.dimension)
+        norm_values.append(float(hilbert_space.norm(state)))
+        outcome = validator.validate(
+            state,
+            enforce_frequency_positivity=enforce_frequency,
+        )
+        summary = outcome.summary
+        normalized_flags.append(bool(summary.get("normalized", False)))
+
+        coherence_summary = summary.get("coherence")
+        if isinstance(coherence_summary, Mapping):
+            coherence_flags.append(bool(coherence_summary.get("passed", False)))
+            coherence_values.append(float(coherence_summary.get("value", 0.0)))
+            if coherence_threshold is None and "threshold" in coherence_summary:
+                coherence_threshold = float(coherence_summary.get("threshold", 0.0))
+
+        frequency_summary = summary.get("frequency")
+        if isinstance(frequency_summary, Mapping):
+            frequency_flags.append(bool(frequency_summary.get("passed", False)))
+            frequency_values.append(float(frequency_summary.get("value", 0.0)))
+            if frequency_spectrum_min is None and "spectrum_min" in frequency_summary:
+                frequency_spectrum_min = float(frequency_summary.get("spectrum_min", 0.0))
+
+    if norm_values:
+        logger.info(
+            "[MATH] Hilbert norm preserved=%s (min=%.6f, max=%.6f)",
+            all(normalized_flags),
+            min(norm_values),
+            max(norm_values),
+        )
+
+    if coherence_values and coherence_threshold is not None:
+        logger.info(
+            "[MATH] Coherence ≥ C_min=%s (C_min=%.6f, min=%.6f)",
+            all(coherence_flags),
+            float(coherence_threshold),
+            min(coherence_values),
+        )
+
+    if frequency_values:
+        if frequency_spectrum_min is not None:
+            logger.info(
+                "[MATH] νf positivity=%s (min=%.6f, spectrum_min=%.6f)",
+                all(frequency_flags),
+                min(frequency_values),
+                frequency_spectrum_min,
+            )
+        else:
+            logger.info(
+                "[MATH] νf positivity=%s (min=%.6f)",
+                all(frequency_flags),
+                min(frequency_values),
+            )
+
+
 def _log_run_summaries(G: "nx.Graph", args: argparse.Namespace) -> None:
     cfg_coh = G.graph.get("COHERENCE", METRIC_DEFAULTS["COHERENCE"])
     cfg_diag = G.graph.get("DIAGNOSIS", METRIC_DEFAULTS["DIAGNOSIS"])
@@ -281,8 +606,12 @@ def _log_run_summaries(G: "nx.Graph", args: argparse.Namespace) -> None:
     if has_latency_values:
         logger.info("Average latency: %s", summary["latency_mean"])

+    _log_math_engine_summary(G)
+

 def cmd_run(args: argparse.Namespace) -> int:
+    """Execute ``tnfr run`` returning the exit status."""
+
     code, graph = _run_cli_program(args)
     if code != 0:
         return code
@@ -293,18 +622,18 @@ def cmd_run(args: argparse.Namespace) -> int:


 def cmd_sequence(args: argparse.Namespace) -> int:
+    """Execute ``tnfr sequence`` returning the exit status."""
+
     if args.preset and args.sequence_file:
-        logger.error(
-            "Cannot use --preset and --sequence-file at the same time"
-        )
+        logger.error("Cannot use --preset and --sequence-file at the same time")
         return 1
-    code, _ = _run_cli_program(
-        args, default_program=get_preset(CANONICAL_PRESET_NAME)
-    )
+    code, _ = _run_cli_program(args, default_program=get_preset(CANONICAL_PRESET_NAME))
     return code


 def cmd_metrics(args: argparse.Namespace) -> int:
+    """Execute ``tnfr metrics`` returning the exit status."""
+
     if getattr(args, "steps", None) is None:
         # Default a longer run for metrics stability
         args.steps = 200
@@ -320,3 +649,60 @@ def cmd_metrics(args: argparse.Namespace) -> int:
     else:
         logger.info("%s", json_dumps(out))
     return 0
+
+
+def cmd_profile_si(args: argparse.Namespace) -> int:
+    """Execute ``tnfr profile-si`` returning the exit status."""
+
+    try:
+        profile_module = import_module("benchmarks.compute_si_profile")
+    except ModuleNotFoundError as exc:  # pragma: no cover - optional dependency
+        logger.error("Sense Index profiling helpers unavailable: %s", exc)
+        return 1
+
+    profile_compute_si = getattr(profile_module, "profile_compute_si")
+
+    profile_compute_si(
+        node_count=int(args.nodes),
+        chord_step=int(args.chord_step),
+        loops=int(args.loops),
+        output_dir=Path(args.output_dir),
+        fmt=str(args.format),
+        sort=str(args.sort),
+    )
+    return 0
+
+
+def cmd_profile_pipeline(args: argparse.Namespace) -> int:
+    """Execute ``tnfr profile-pipeline`` returning the exit status."""
+
+    try:
+        profile_module = import_module("benchmarks.full_pipeline_profile")
+    except ModuleNotFoundError as exc:  # pragma: no cover - optional dependency
+        logger.error("Full pipeline profiling helpers unavailable: %s", exc)
+        return 1
+
+    profile_full_pipeline = getattr(profile_module, "profile_full_pipeline")
+
+    try:
+        si_chunk_sizes = _parse_cli_variants(getattr(args, "si_chunk_sizes", None))
+        dnfr_chunk_sizes = _parse_cli_variants(getattr(args, "dnfr_chunk_sizes", None))
+        si_workers = _parse_cli_variants(getattr(args, "si_workers", None))
+        dnfr_workers = _parse_cli_variants(getattr(args, "dnfr_workers", None))
+    except ValueError as exc:
+        logger.error("%s", exc)
+        return 2
+
+    profile_full_pipeline(
+        node_count=int(args.nodes),
+        edge_probability=float(args.edge_probability),
+        loops=int(args.loops),
+        seed=int(args.seed),
+        output_dir=Path(args.output_dir),
+        sort=str(args.sort),
+        si_chunk_sizes=si_chunk_sizes,
+        dnfr_chunk_sizes=dnfr_chunk_sizes,
+        si_workers=si_workers,
+        dnfr_workers=dnfr_workers,
+    )
+    return 0
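
Usage note (not part of the diff): the math-engine plumbing added above is driven entirely by argparse attributes, so a rough, untested sketch of exercising it outside the CLI looks like the following. The Namespace attribute names simply mirror the getattr() lookups in _configure_math_engine / _build_math_engine_config; the actual flag spellings live in tnfr/cli/arguments.py and may differ, so treat this as an illustration only.

    # Hypothetical sketch (assumes tnfr 7.0.0 is installed); not taken from the diff.
    import argparse

    import networkx as nx

    from tnfr.cli.execution import _configure_math_engine

    G = nx.path_graph(4)
    args = argparse.Namespace(
        math_engine=True,                       # switch that stores G.graph["MATH_ENGINE"]
        math_dimension=None,                    # None -> inferred from sequence lengths or node count
        math_coherence_spectrum=None,           # None -> default coherence operator is built
        math_coherence_c_min=0.1,
        math_frequency_diagonal=[1.0, 1.0, 1.0, 1.0],
        math_generator_diagonal=None,           # None -> zero generator matrix
        math_coherence_threshold=None,          # None -> defaults to coherence_operator.c_min
    )
    _configure_math_engine(G, args)
    cfg = G.graph["MATH_ENGINE"]
    print(cfg["dimension"], cfg["coherence_threshold"])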