tnfr 6.0.0__py3-none-any.whl → 7.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of tnfr has been flagged for review. See the advisory details below for more information.

Files changed (176) hide show
  1. tnfr/__init__.py +50 -5
  2. tnfr/__init__.pyi +0 -7
  3. tnfr/_compat.py +0 -1
  4. tnfr/_generated_version.py +34 -0
  5. tnfr/_version.py +44 -2
  6. tnfr/alias.py +14 -13
  7. tnfr/alias.pyi +5 -37
  8. tnfr/cache.py +9 -729
  9. tnfr/cache.pyi +8 -224
  10. tnfr/callback_utils.py +16 -31
  11. tnfr/callback_utils.pyi +3 -29
  12. tnfr/cli/__init__.py +17 -11
  13. tnfr/cli/__init__.pyi +0 -21
  14. tnfr/cli/arguments.py +175 -14
  15. tnfr/cli/arguments.pyi +5 -11
  16. tnfr/cli/execution.py +434 -48
  17. tnfr/cli/execution.pyi +14 -24
  18. tnfr/cli/utils.py +20 -3
  19. tnfr/cli/utils.pyi +5 -5
  20. tnfr/config/__init__.py +2 -1
  21. tnfr/config/__init__.pyi +2 -0
  22. tnfr/config/feature_flags.py +83 -0
  23. tnfr/config/init.py +1 -1
  24. tnfr/config/operator_names.py +1 -14
  25. tnfr/config/presets.py +6 -26
  26. tnfr/constants/__init__.py +10 -13
  27. tnfr/constants/__init__.pyi +10 -22
  28. tnfr/constants/aliases.py +31 -0
  29. tnfr/constants/core.py +4 -3
  30. tnfr/constants/init.py +1 -1
  31. tnfr/constants/metric.py +3 -3
  32. tnfr/dynamics/__init__.py +64 -10
  33. tnfr/dynamics/__init__.pyi +3 -4
  34. tnfr/dynamics/adaptation.py +79 -13
  35. tnfr/dynamics/aliases.py +10 -9
  36. tnfr/dynamics/coordination.py +77 -35
  37. tnfr/dynamics/dnfr.py +575 -274
  38. tnfr/dynamics/dnfr.pyi +1 -10
  39. tnfr/dynamics/integrators.py +47 -33
  40. tnfr/dynamics/integrators.pyi +0 -1
  41. tnfr/dynamics/runtime.py +489 -129
  42. tnfr/dynamics/sampling.py +2 -0
  43. tnfr/dynamics/selectors.py +101 -62
  44. tnfr/execution.py +15 -8
  45. tnfr/execution.pyi +5 -25
  46. tnfr/flatten.py +7 -3
  47. tnfr/flatten.pyi +1 -8
  48. tnfr/gamma.py +22 -26
  49. tnfr/gamma.pyi +0 -6
  50. tnfr/glyph_history.py +37 -26
  51. tnfr/glyph_history.pyi +1 -19
  52. tnfr/glyph_runtime.py +16 -0
  53. tnfr/glyph_runtime.pyi +9 -0
  54. tnfr/immutable.py +20 -15
  55. tnfr/immutable.pyi +4 -7
  56. tnfr/initialization.py +5 -7
  57. tnfr/initialization.pyi +1 -9
  58. tnfr/io.py +6 -305
  59. tnfr/io.pyi +13 -8
  60. tnfr/mathematics/__init__.py +81 -0
  61. tnfr/mathematics/backend.py +426 -0
  62. tnfr/mathematics/dynamics.py +398 -0
  63. tnfr/mathematics/epi.py +254 -0
  64. tnfr/mathematics/generators.py +222 -0
  65. tnfr/mathematics/metrics.py +119 -0
  66. tnfr/mathematics/operators.py +233 -0
  67. tnfr/mathematics/operators_factory.py +71 -0
  68. tnfr/mathematics/projection.py +78 -0
  69. tnfr/mathematics/runtime.py +173 -0
  70. tnfr/mathematics/spaces.py +247 -0
  71. tnfr/mathematics/transforms.py +292 -0
  72. tnfr/metrics/__init__.py +10 -10
  73. tnfr/metrics/coherence.py +123 -94
  74. tnfr/metrics/common.py +22 -13
  75. tnfr/metrics/common.pyi +42 -11
  76. tnfr/metrics/core.py +72 -14
  77. tnfr/metrics/diagnosis.py +48 -57
  78. tnfr/metrics/diagnosis.pyi +3 -7
  79. tnfr/metrics/export.py +3 -5
  80. tnfr/metrics/glyph_timing.py +41 -31
  81. tnfr/metrics/reporting.py +13 -6
  82. tnfr/metrics/sense_index.py +884 -114
  83. tnfr/metrics/trig.py +167 -11
  84. tnfr/metrics/trig.pyi +1 -0
  85. tnfr/metrics/trig_cache.py +112 -15
  86. tnfr/node.py +400 -17
  87. tnfr/node.pyi +55 -38
  88. tnfr/observers.py +111 -8
  89. tnfr/observers.pyi +0 -15
  90. tnfr/ontosim.py +9 -6
  91. tnfr/ontosim.pyi +0 -5
  92. tnfr/operators/__init__.py +529 -42
  93. tnfr/operators/__init__.pyi +14 -0
  94. tnfr/operators/definitions.py +350 -18
  95. tnfr/operators/definitions.pyi +0 -14
  96. tnfr/operators/grammar.py +760 -0
  97. tnfr/operators/jitter.py +28 -22
  98. tnfr/operators/registry.py +7 -12
  99. tnfr/operators/registry.pyi +0 -2
  100. tnfr/operators/remesh.py +38 -61
  101. tnfr/rng.py +17 -300
  102. tnfr/schemas/__init__.py +8 -0
  103. tnfr/schemas/grammar.json +94 -0
  104. tnfr/selector.py +3 -4
  105. tnfr/selector.pyi +1 -1
  106. tnfr/sense.py +22 -24
  107. tnfr/sense.pyi +0 -7
  108. tnfr/structural.py +504 -21
  109. tnfr/structural.pyi +41 -18
  110. tnfr/telemetry/__init__.py +23 -1
  111. tnfr/telemetry/cache_metrics.py +226 -0
  112. tnfr/telemetry/nu_f.py +423 -0
  113. tnfr/telemetry/nu_f.pyi +123 -0
  114. tnfr/tokens.py +1 -4
  115. tnfr/tokens.pyi +1 -6
  116. tnfr/trace.py +20 -53
  117. tnfr/trace.pyi +9 -37
  118. tnfr/types.py +244 -15
  119. tnfr/types.pyi +200 -14
  120. tnfr/units.py +69 -0
  121. tnfr/units.pyi +16 -0
  122. tnfr/utils/__init__.py +107 -48
  123. tnfr/utils/__init__.pyi +80 -11
  124. tnfr/utils/cache.py +1705 -65
  125. tnfr/utils/cache.pyi +370 -58
  126. tnfr/utils/chunks.py +104 -0
  127. tnfr/utils/chunks.pyi +21 -0
  128. tnfr/utils/data.py +95 -5
  129. tnfr/utils/data.pyi +8 -17
  130. tnfr/utils/graph.py +2 -4
  131. tnfr/utils/init.py +31 -7
  132. tnfr/utils/init.pyi +4 -11
  133. tnfr/utils/io.py +313 -14
  134. tnfr/{helpers → utils}/numeric.py +50 -24
  135. tnfr/utils/numeric.pyi +21 -0
  136. tnfr/validation/__init__.py +92 -4
  137. tnfr/validation/__init__.pyi +77 -17
  138. tnfr/validation/compatibility.py +79 -43
  139. tnfr/validation/compatibility.pyi +4 -6
  140. tnfr/validation/grammar.py +55 -133
  141. tnfr/validation/grammar.pyi +37 -8
  142. tnfr/validation/graph.py +138 -0
  143. tnfr/validation/graph.pyi +17 -0
  144. tnfr/validation/rules.py +161 -74
  145. tnfr/validation/rules.pyi +55 -18
  146. tnfr/validation/runtime.py +263 -0
  147. tnfr/validation/runtime.pyi +31 -0
  148. tnfr/validation/soft_filters.py +170 -0
  149. tnfr/validation/soft_filters.pyi +37 -0
  150. tnfr/validation/spectral.py +159 -0
  151. tnfr/validation/spectral.pyi +46 -0
  152. tnfr/validation/syntax.py +28 -139
  153. tnfr/validation/syntax.pyi +7 -4
  154. tnfr/validation/window.py +39 -0
  155. tnfr/validation/window.pyi +1 -0
  156. tnfr/viz/__init__.py +9 -0
  157. tnfr/viz/matplotlib.py +246 -0
  158. {tnfr-6.0.0.dist-info → tnfr-7.0.0.dist-info}/METADATA +63 -19
  159. tnfr-7.0.0.dist-info/RECORD +185 -0
  160. {tnfr-6.0.0.dist-info → tnfr-7.0.0.dist-info}/licenses/LICENSE.md +1 -1
  161. tnfr/constants_glyphs.py +0 -16
  162. tnfr/constants_glyphs.pyi +0 -12
  163. tnfr/grammar.py +0 -25
  164. tnfr/grammar.pyi +0 -13
  165. tnfr/helpers/__init__.py +0 -151
  166. tnfr/helpers/__init__.pyi +0 -66
  167. tnfr/helpers/numeric.pyi +0 -12
  168. tnfr/presets.py +0 -15
  169. tnfr/presets.pyi +0 -7
  170. tnfr/utils/io.pyi +0 -10
  171. tnfr/utils/validators.py +0 -130
  172. tnfr/utils/validators.pyi +0 -19
  173. tnfr-6.0.0.dist-info/RECORD +0 -157
  174. {tnfr-6.0.0.dist-info → tnfr-7.0.0.dist-info}/WHEEL +0 -0
  175. {tnfr-6.0.0.dist-info → tnfr-7.0.0.dist-info}/entry_points.txt +0 -0
  176. {tnfr-6.0.0.dist-info → tnfr-7.0.0.dist-info}/top_level.txt +0 -0
tnfr/utils/io.py CHANGED
@@ -1,26 +1,21 @@
1
- """Serialisation helpers for TNFR, including JSON convenience wrappers."""
1
+ """Structured file and JSON utilities shared across the TNFR engine."""
2
2
 
3
3
  from __future__ import annotations
4
4
 
5
- from dataclasses import dataclass
6
5
  import json
6
+ import os
7
+ import tempfile
8
+ from dataclasses import dataclass
9
+ from functools import partial
10
+ from pathlib import Path
7
11
  from typing import Any, Callable
8
12
 
9
- from .init import cached_import, get_logger, warn_once
10
-
11
- __all__ = (
12
- "JsonDumpsParams",
13
- "DEFAULT_PARAMS",
14
- "json_dumps",
15
- "clear_orjson_param_warnings",
16
- )
17
-
18
- _ORJSON_PARAMS_MSG = (
19
- "'ensure_ascii', 'separators', 'cls' and extra kwargs are ignored when using orjson: %s"
20
- )
13
+ from .init import LazyImportProxy, cached_import, get_logger, warn_once
21
14
 
22
15
  logger = get_logger(__name__)
23
16
 
17
+ _ORJSON_PARAMS_MSG = "'ensure_ascii', 'separators', 'cls' and extra kwargs are ignored when using orjson: %s"
18
+
24
19
  _warn_ignored_params_once = warn_once(logger, _ORJSON_PARAMS_MSG)
25
20
 
26
21
 
@@ -155,3 +150,307 @@ def json_dumps(
155
150
  if orjson is not None:
156
151
  return _json_dumps_orjson(orjson, obj, params, **kwargs)
157
152
  return _json_dumps_std(obj, params, **kwargs)
153
+
154
+
155
+ def _raise_import_error(name: str, *_: Any, **__: Any) -> Any:
156
+ raise ImportError(f"{name} is not installed")
157
+
158
+
159
+ _MISSING_TOML_ERROR = type(
160
+ "MissingTOMLDependencyError",
161
+ (Exception,),
162
+ {"__doc__": "Fallback error used when tomllib/tomli is missing."},
163
+ )
164
+
165
+ _MISSING_YAML_ERROR = type(
166
+ "MissingPyYAMLDependencyError",
167
+ (Exception,),
168
+ {"__doc__": "Fallback error used when pyyaml is missing."},
169
+ )
170
+
171
+
172
def _resolve_lazy(value: Any) -> Any:
    """Materialise ``value`` when it is a :class:`LazyImportProxy`; otherwise pass it through."""
    if not isinstance(value, LazyImportProxy):
        return value
    return value.resolve()
176
+
177
+
178
class _LazyBool:
    """Truthiness proxy: evaluates to ``True`` only once the lazy import resolves to a module."""

    __slots__ = ("_value",)

    def __init__(self, value: Any) -> None:
        # Keep the (possibly lazy) handle as-is; resolution is deferred.
        self._value = value

    def __bool__(self) -> bool:
        # Resolve on demand so merely constructing the proxy never imports anything.
        return _resolve_lazy(self._value) is not None
186
+
187
+
188
+ _TOMLI_MODULE = cached_import("tomli", emit="log", lazy=True)
189
+ tomllib = cached_import(
190
+ "tomllib",
191
+ emit="log",
192
+ lazy=True,
193
+ fallback=_TOMLI_MODULE,
194
+ )
195
+ has_toml = _LazyBool(tomllib)
196
+
197
+
198
+ _TOMLI_TOML_ERROR = cached_import(
199
+ "tomli",
200
+ "TOMLDecodeError",
201
+ emit="log",
202
+ lazy=True,
203
+ fallback=_MISSING_TOML_ERROR,
204
+ )
205
+ TOMLDecodeError = cached_import(
206
+ "tomllib",
207
+ "TOMLDecodeError",
208
+ emit="log",
209
+ lazy=True,
210
+ fallback=_TOMLI_TOML_ERROR,
211
+ )
212
+
213
+
214
+ _TOMLI_LOADS = cached_import(
215
+ "tomli",
216
+ "loads",
217
+ emit="log",
218
+ lazy=True,
219
+ fallback=partial(_raise_import_error, "tomllib/tomli"),
220
+ )
221
+ _TOML_LOADS: Callable[[str], Any] = cached_import(
222
+ "tomllib",
223
+ "loads",
224
+ emit="log",
225
+ lazy=True,
226
+ fallback=_TOMLI_LOADS,
227
+ )
228
+
229
+
230
+ yaml = cached_import("yaml", emit="log", lazy=True)
231
+
232
+
233
+ YAMLError = cached_import(
234
+ "yaml",
235
+ "YAMLError",
236
+ emit="log",
237
+ lazy=True,
238
+ fallback=_MISSING_YAML_ERROR,
239
+ )
240
+
241
+
242
+ _YAML_SAFE_LOAD: Callable[[str], Any] = cached_import(
243
+ "yaml",
244
+ "safe_load",
245
+ emit="log",
246
+ lazy=True,
247
+ fallback=partial(_raise_import_error, "pyyaml"),
248
+ )
249
+
250
+
251
def _parse_yaml(text: str) -> Any:
    """Deserialise YAML ``text`` through the lazily imported ``safe_load`` callable."""
    loader = _YAML_SAFE_LOAD
    return loader(text)
255
+
256
+
257
def _parse_toml(text: str) -> Any:
    """Deserialise TOML ``text`` through ``tomllib`` (or the ``tomli`` backport)."""
    loader = _TOML_LOADS
    return loader(text)
261
+
262
+
263
+ PARSERS = {
264
+ ".json": json.loads,
265
+ ".yaml": _parse_yaml,
266
+ ".yml": _parse_yaml,
267
+ ".toml": _parse_toml,
268
+ }
269
+
270
+
271
def _get_parser(suffix: str) -> Callable[[str], Any]:
    """Look up the parser registered for ``suffix``; raise ``ValueError`` for unknown formats."""
    try:
        parser = PARSERS[suffix]
    except KeyError as missing:
        raise ValueError(f"Unsupported suffix: {suffix}") from missing
    return parser
276
+
277
+
278
+ _BASE_ERROR_MESSAGES: dict[type[BaseException], str] = {
279
+ OSError: "Could not read {path}: {e}",
280
+ UnicodeDecodeError: "Encoding error while reading {path}: {e}",
281
+ json.JSONDecodeError: "Error parsing JSON file at {path}: {e}",
282
+ ImportError: "Missing dependency parsing {path}: {e}",
283
+ }
284
+
285
+
286
def _resolve_exception_type(candidate: Any) -> type[BaseException] | None:
    """Return ``candidate`` as a concrete exception class, or ``None`` when it is not one."""
    resolved = _resolve_lazy(candidate)
    is_exception_class = isinstance(resolved, type) and issubclass(resolved, BaseException)
    return resolved if is_exception_class else None
291
+
292
+
293
+ _OPTIONAL_ERROR_MESSAGE_FACTORIES: tuple[
294
+ tuple[Callable[[], type[BaseException] | None], str],
295
+ ...,
296
+ ] = (
297
+ (
298
+ lambda: _resolve_exception_type(YAMLError),
299
+ "Error parsing YAML file at {path}: {e}",
300
+ ),
301
+ (
302
+ lambda: _resolve_exception_type(TOMLDecodeError),
303
+ "Error parsing TOML file at {path}: {e}",
304
+ ),
305
+ )
306
+
307
+
308
+ _BASE_STRUCTURED_EXCEPTIONS = (
309
+ OSError,
310
+ UnicodeDecodeError,
311
+ json.JSONDecodeError,
312
+ ImportError,
313
+ )
314
+
315
+
316
def _iter_optional_exceptions() -> list[type[BaseException]]:
    """Collect the optional-dependency exception classes that resolved successfully."""
    candidates = (resolver() for resolver, _ in _OPTIONAL_ERROR_MESSAGE_FACTORIES)
    return [exc_type for exc_type in candidates if exc_type is not None]
323
+
324
+
325
def _is_structured_error(exc: Exception) -> bool:
    """Return ``True`` when ``exc`` belongs to the structured-file error family."""
    # Fixed stdlib types first; optional YAML/TOML types are resolved lazily.
    if isinstance(exc, _BASE_STRUCTURED_EXCEPTIONS):
        return True
    return any(isinstance(exc, optional) for optional in _iter_optional_exceptions())
332
+
333
+
334
def _format_structured_file_error(path: Path, e: Exception) -> str:
    """Build a human-readable message describing a structured-file failure at ``path``."""
    # Check the always-available stdlib exception types first.
    for base_type, template in _BASE_ERROR_MESSAGES.items():
        if isinstance(e, base_type):
            return template.format(path=path, e=e)

    # Then the lazily resolved optional-dependency exception types.
    for resolver, template in _OPTIONAL_ERROR_MESSAGE_FACTORIES:
        optional_type = resolver()
        if optional_type is not None and isinstance(e, optional_type):
            return template.format(path=path, e=e)

    return f"Error parsing {path}: {e}"
345
+
346
+
347
class StructuredFileError(Exception):
    """Error while reading or parsing a structured file.

    Attributes
    ----------
    path:
        Location of the offending file.
    original:
        The underlying exception that triggered this error (also chained
        via ``__cause__`` at the raise site).
    """

    def __init__(self, path: Path, original: Exception) -> None:
        super().__init__(_format_structured_file_error(path, original))
        self.path = path
        # Keep the root cause directly accessible for programmatic handling,
        # not only via exception chaining.
        self.original = original
353
+
354
+
355
def read_structured_file(path: Path) -> Any:
    """Load and parse ``path`` as JSON, YAML or TOML, chosen by file suffix.

    Raises :class:`StructuredFileError` for unsupported suffixes and for any
    recognised read/parse failure; unexpected exceptions propagate unchanged.
    """
    try:
        parser = _get_parser(path.suffix.lower())
    except ValueError as unsupported:
        raise StructuredFileError(path, unsupported) from unsupported
    try:
        return parser(path.read_text(encoding="utf-8"))
    except Exception as failure:
        if not _is_structured_error(failure):
            raise
        raise StructuredFileError(path, failure) from failure
370
+
371
+
372
def safe_write(
    path: str | Path,
    write: Callable[[Any], Any],
    *,
    mode: str = "w",
    encoding: str | None = "utf-8",
    atomic: bool = True,
    sync: bool | None = None,
    **open_kwargs: Any,
) -> None:
    """Persist data to ``path`` via the ``write`` callback, optionally atomically.

    Parameters
    ----------
    path:
        Destination file; missing parent directories are created.
    write:
        Callback that receives the open file object and writes the payload.
    mode:
        Mode passed to :func:`open`. When binary (``'b'`` in ``mode``) no
        ``encoding`` is supplied so the callback may write bytes.
    encoding:
        Text encoding; ignored for binary modes.
    atomic:
        When ``True`` (default) the payload goes to a sibling temporary file
        that then replaces ``path`` via :func:`os.replace`.
    sync:
        Force ``flush`` + ``fsync`` after writing; ``None`` mirrors ``atomic``.

    Raises
    ------
    OSError, ValueError, TypeError
        Re-raised with a message naming the destination file.
    """
    destination = Path(path)
    destination.parent.mkdir(parents=True, exist_ok=True)

    open_params: dict[str, Any] = dict(mode=mode, **open_kwargs)
    if encoding is not None and "b" not in mode:
        open_params["encoding"] = encoding
    do_sync = atomic if sync is None else sync

    def _write_to(target: Path) -> None:
        # Shared write path: run the callback, then optionally push the
        # payload to stable storage before the handle closes.
        with open(target, **open_params) as handle:
            write(handle)
            if do_sync:
                handle.flush()
                os.fsync(handle.fileno())

    tmp_path: Path | None = None
    try:
        if not atomic:
            _write_to(destination)
        else:
            # Reserve a unique sibling file so os.replace stays on one filesystem.
            placeholder = tempfile.NamedTemporaryFile(dir=destination.parent, delete=False)
            tmp_path = Path(placeholder.name)
            placeholder.close()
            _write_to(tmp_path)
            try:
                os.replace(tmp_path, destination)
            except OSError as err:
                logger.error(
                    "Atomic replace failed for %s -> %s: %s", tmp_path, destination, err
                )
                raise
    except (OSError, ValueError, TypeError) as err:
        raise type(err)(f"Failed to write file {destination}: {err}") from err
    finally:
        # After a successful replace the temp file no longer exists; missing_ok
        # makes cleanup a no-op in that case.
        if tmp_path is not None:
            tmp_path.unlink(missing_ok=True)
443
+
444
+
445
+ __all__ = (
446
+ "JsonDumpsParams",
447
+ "DEFAULT_PARAMS",
448
+ "clear_orjson_param_warnings",
449
+ "json_dumps",
450
+ "read_structured_file",
451
+ "safe_write",
452
+ "StructuredFileError",
453
+ "TOMLDecodeError",
454
+ "YAMLError",
455
+ )
456
+
@@ -2,8 +2,9 @@
2
2
 
3
3
  from __future__ import annotations
4
4
 
5
- from collections.abc import Iterable, Sequence
6
5
  import math
6
+ from collections.abc import Iterable, Sequence
7
+ from typing import Any
7
8
 
8
9
  __all__ = (
9
10
  "clamp",
@@ -12,36 +13,31 @@ __all__ = (
12
13
  "similarity_abs",
13
14
  "kahan_sum_nd",
14
15
  "angle_diff",
16
+ "angle_diff_array",
15
17
  )
16
18
 
17
19
 
18
20
def clamp(x: float, a: float, b: float) -> float:
    """Return ``x`` limited to the interval ``[a, b]``."""
    capped_above = min(b, x)
    return max(a, capped_above)
21
24
 
22
25
 
23
26
def clamp01(x: float) -> float:
    """Project ``x`` onto the closed unit interval ``[0, 1]``."""
    value = float(x)
    return clamp(value, 0.0, 1.0)
26
30
 
27
31
 
28
32
def within_range(val: float, lower: float, upper: float, tol: float = 1e-9) -> bool:
    """Return ``True`` if ``val`` lies in ``[lower, upper]`` within ``tol``.

    Uses absolute differences against each bound rather than ``math.isclose``.
    """
    v = float(val)
    inside = lower <= v <= upper
    near_lower = abs(v - lower) <= tol
    near_upper = abs(v - upper) <= tol
    return inside or near_lower or near_upper
36
37
 
37
38
 
38
39
  def _norm01(x: float, lo: float, hi: float) -> float:
39
- """Normalize ``x`` to the unit interval given bounds.
40
-
41
- ``lo`` and ``hi`` delimit the original value range. When ``hi`` is not
42
- greater than ``lo`` the function returns ``0.0`` to avoid division by
43
- zero. The result is clamped to ``[0,1]``.
44
- """
40
+ """Normalize ``x`` to the unit interval given bounds."""
45
41
 
46
42
  if hi <= lo:
47
43
  return 0.0
@@ -49,24 +45,14 @@ def _norm01(x: float, lo: float, hi: float) -> float:
49
45
 
50
46
 
51
47
def similarity_abs(a: float, b: float, lo: float, hi: float) -> float:
    """Return absolute similarity of ``a`` and ``b`` over ``[lo, hi]``.

    Computed as one minus the normalised absolute difference, so the result
    lies in ``[0, 1]``.
    """
    gap = abs(float(a) - float(b))
    return 1.0 - _norm01(gap, 0.0, hi - lo)
60
51
 
61
52
 
62
- def kahan_sum_nd(
63
- values: Iterable[Sequence[float]], dims: int
64
- ) -> tuple[float, ...]:
65
- """Return compensated sums of ``values`` with ``dims`` components.
53
+ def kahan_sum_nd(values: Iterable[Sequence[float]], dims: int) -> tuple[float, ...]:
54
+ """Return compensated sums of ``values`` with ``dims`` components."""
66
55
 
67
- Each component of the tuples in ``values`` is summed independently using the
68
- Kahan–Babuška (Neumaier) algorithm to reduce floating point error.
69
- """
70
56
  if dims < 1:
71
57
  raise ValueError("dims must be >= 1")
72
58
  totals = [0.0] * dims
@@ -85,4 +71,44 @@ def kahan_sum_nd(
85
71
 
86
72
def angle_diff(a: float, b: float) -> float:
    """Return the signed minimal angular difference ``a - b`` in radians, in ``[-pi, pi)``."""
    shifted = (float(a) - float(b) + math.pi) % math.tau
    return shifted - math.pi
76
+
77
+
78
def angle_diff_array(
    a: Sequence[float] | "np.ndarray",
    b: Sequence[float] | "np.ndarray",
    *,
    np: Any,
    out: "np.ndarray | None" = None,
    where: "np.ndarray | None" = None,
) -> "np.ndarray":
    """Vectorised :func:`angle_diff` compatible with NumPy arrays.

    Parameters
    ----------
    a, b:
        Angle arrays (radians); broadcastable shapes are supported.
    np:
        The NumPy module to operate with (injected to keep NumPy optional).
    out:
        Optional preallocated output; must match the broadcast shape.
    where:
        Optional boolean mask; unmasked entries are left at ``0.0`` (or the
        caller-provided ``out`` values) and only masked entries are computed.

    Raises
    ------
    TypeError
        When ``np`` is ``None``.
    ValueError
        When ``out`` or ``where`` does not match the broadcast shape.
    """
    if np is None:
        raise TypeError("angle_diff_array requires a NumPy module")

    kwargs = {"where": where} if where is not None else {}
    minuend = np.asarray(a, dtype=float)
    subtrahend = np.asarray(b, dtype=float)
    # Validate/allocate against the true broadcast shape, not just a's shape,
    # so differently-shaped broadcastable inputs work.
    shape = np.broadcast_shapes(minuend.shape, subtrahend.shape)
    if out is None:
        out = np.empty(shape, dtype=float)
        if where is not None:
            out.fill(0.0)
    else:
        if getattr(out, "shape", None) != shape:
            raise ValueError("out must match the broadcasted shape of inputs")

    # Compute ((a - b + pi) mod tau) - pi in place.
    np.subtract(minuend, subtrahend, out=out, **kwargs)
    np.add(out, math.pi, out=out, **kwargs)
    if where is not None:
        mask = np.asarray(where, dtype=bool)
        if mask.shape != out.shape:
            raise ValueError("where mask must match the broadcasted shape of inputs")
        # np.remainder lacks masked in-place semantics we need; apply on the
        # masked subset only.
        selected = out[mask]
        if selected.size:
            out[mask] = np.remainder(selected, math.tau)
    else:
        np.remainder(out, math.tau, out=out)
    np.subtract(out, math.pi, out=out, **kwargs)
    return out
tnfr/utils/numeric.pyi ADDED
@@ -0,0 +1,21 @@
1
+ from __future__ import annotations
2
+
3
+ from collections.abc import Iterable, Sequence
4
+ from typing import Any
5
+
6
+ __all__: tuple[str, ...]
7
+
8
+ def clamp(x: float, a: float, b: float) -> float: ...
9
+ def clamp01(x: float) -> float: ...
10
+ def within_range(val: float, lower: float, upper: float, tol: float = ...) -> bool: ...
11
+ def similarity_abs(a: float, b: float, lo: float, hi: float) -> float: ...
12
+ def kahan_sum_nd(values: Iterable[Sequence[float]], dims: int) -> tuple[float, ...]: ...
13
+ def angle_diff(a: float, b: float) -> float: ...
14
+ def angle_diff_array(
15
+ a: Sequence[float] | Any,
16
+ b: Sequence[float] | Any,
17
+ *,
18
+ np: Any,
19
+ out: Any | None = ...,
20
+ where: Any | None = ...,
21
+ ) -> Any: ...
@@ -1,25 +1,113 @@
1
- """Validation helpers for TNFR sequences."""
1
+ """Unified validation interface consolidating grammar, graph and spectral checks.
2
+
3
+ This package re-exports the canonical grammar helpers implemented in
4
+ ``tnfr.operators.grammar`` so downstream code can rely on a single import path for
5
+ structural validation primitives.
6
+ """
7
+
8
+ from __future__ import annotations
9
+
10
+ from dataclasses import dataclass
11
+ from typing import Any, Generic, Mapping, Protocol, TypeVar, runtime_checkable
12
+
13
+ SubjectT = TypeVar("SubjectT")
14
+
15
+
16
+ @dataclass(slots=True)
17
+ class ValidationOutcome(Generic[SubjectT]):
18
+ """Result emitted by all canonical TNFR validators."""
19
+
20
+ subject: SubjectT
21
+ """The validated subject in canonical form."""
22
+
23
+ passed: bool
24
+ """Whether the validation succeeded without invariant violations."""
25
+
26
+ summary: Mapping[str, Any]
27
+ """Structured diagnostics describing the performed checks."""
28
+
29
+ artifacts: Mapping[str, Any] | None = None
30
+ """Optional artefacts (e.g. clamped nodes, normalised vectors)."""
31
+
32
+
33
@runtime_checkable
class Validator(Protocol[SubjectT]):
    """Structural contract shared by runtime and spectral validators."""

    def validate(self, subject: SubjectT, /, **kwargs: Any) -> ValidationOutcome[SubjectT]:
        """Check ``subject`` and return the resulting :class:`ValidationOutcome`."""

    def report(self, outcome: "ValidationOutcome[SubjectT]") -> str:
        """Render ``outcome`` as a concise human-readable explanation."""
42
+
2
43
 
3
44
  from .compatibility import CANON_COMPAT, CANON_FALLBACK
4
- from .grammar import (
45
+ from ..operators.grammar import (
5
46
  GrammarContext,
47
+ MutationPreconditionError,
48
+ RepeatWindowError,
49
+ SequenceValidationResult,
50
+ SequenceSyntaxError,
51
+ StructuralGrammarError,
52
+ TholClosureError,
53
+ TransitionCompatibilityError,
6
54
  apply_glyph_with_grammar,
7
55
  enforce_canonical_grammar,
8
56
  on_applied_glyph,
57
+ record_grammar_violation,
9
58
  )
10
- from .rules import coerce_glyph, glyph_fallback, normalized_dnfr, get_norm
11
- from .syntax import validate_sequence
59
+ from .graph import GRAPH_VALIDATORS, run_validators
60
+ from .window import validate_window
61
+ from .runtime import GraphCanonicalValidator, apply_canonical_clamps, validate_canon
62
+ from .rules import coerce_glyph, get_norm, glyph_fallback, normalized_dnfr
63
+ from .soft_filters import (
64
+ acceleration_norm,
65
+ check_repeats,
66
+ maybe_force,
67
+ soft_grammar_filters,
68
+ )
69
+ from ..operators.grammar import validate_sequence
12
70
 
13
71
  __all__ = (
72
+ "ValidationOutcome",
73
+ "Validator",
14
74
  "validate_sequence",
15
75
  "GrammarContext",
76
+ "StructuralGrammarError",
77
+ "RepeatWindowError",
78
+ "MutationPreconditionError",
79
+ "TholClosureError",
80
+ "TransitionCompatibilityError",
81
+ "SequenceValidationResult",
82
+ "SequenceSyntaxError",
16
83
  "apply_glyph_with_grammar",
17
84
  "enforce_canonical_grammar",
18
85
  "on_applied_glyph",
86
+ "record_grammar_violation",
87
+ "validate_window",
88
+ "run_validators",
89
+ "GRAPH_VALIDATORS",
19
90
  "coerce_glyph",
20
91
  "glyph_fallback",
21
92
  "normalized_dnfr",
22
93
  "get_norm",
94
+ "acceleration_norm",
95
+ "check_repeats",
96
+ "maybe_force",
97
+ "soft_grammar_filters",
23
98
  "CANON_COMPAT",
24
99
  "CANON_FALLBACK",
100
+ "GraphCanonicalValidator",
101
+ "apply_canonical_clamps",
102
+ "validate_canon",
103
+ "NFRValidator",
25
104
  )
105
+
106
+
107
+ def __getattr__(name: str) -> Any:
108
+ if name == "NFRValidator":
109
+ from .spectral import NFRValidator as _NFRValidator
110
+
111
+ return _NFRValidator
112
+ raise AttributeError(name)
113
+
@@ -1,17 +1,77 @@
1
- from typing import Any
2
-
3
- __all__: Any
4
-
5
- def __getattr__(name: str) -> Any: ...
6
-
7
- CANON_COMPAT: Any
8
- CANON_FALLBACK: Any
9
- GrammarContext: Any
10
- apply_glyph_with_grammar: Any
11
- coerce_glyph: Any
12
- enforce_canonical_grammar: Any
13
- get_norm: Any
14
- glyph_fallback: Any
15
- normalized_dnfr: Any
16
- on_applied_glyph: Any
17
- validate_sequence: Any
1
+ from collections.abc import Mapping
2
+ from typing import Any, Generic, Protocol, TypeVar
3
+
4
+ from ..types import Glyph, TNFRGraph
5
+ from .compatibility import CANON_COMPAT as CANON_COMPAT, CANON_FALLBACK as CANON_FALLBACK
6
+ from ..operators.grammar import (
7
+ GrammarContext,
8
+ MutationPreconditionError,
9
+ RepeatWindowError,
10
+ SequenceSyntaxError,
11
+ SequenceValidationResult,
12
+ StructuralGrammarError,
13
+ TholClosureError,
14
+ TransitionCompatibilityError,
15
+ apply_glyph_with_grammar,
16
+ enforce_canonical_grammar,
17
+ on_applied_glyph,
18
+ record_grammar_violation,
19
+ )
20
+ from .graph import GRAPH_VALIDATORS, run_validators
21
+ from .window import validate_window
22
+ from .rules import coerce_glyph, get_norm, glyph_fallback, normalized_dnfr
23
+ from .soft_filters import (acceleration_norm, check_repeats, maybe_force, soft_grammar_filters)
24
+ from .runtime import GraphCanonicalValidator, apply_canonical_clamps, validate_canon
25
+ from .spectral import NFRValidator
26
+ from ..operators.grammar import validate_sequence
27
+
28
+ SubjectT = TypeVar("SubjectT")
29
+
30
+
31
+ class ValidationOutcome(Generic[SubjectT]):
32
+ subject: SubjectT
33
+ passed: bool
34
+ summary: Mapping[str, Any]
35
+ artifacts: Mapping[str, Any] | None
36
+
37
+
38
+ class Validator(Protocol[SubjectT]):
39
+ def validate(self, subject: SubjectT, /, **kwargs: Any) -> ValidationOutcome[SubjectT]: ...
40
+
41
+ def report(self, outcome: ValidationOutcome[SubjectT]) -> str: ...
42
+
43
+
44
+ __all__ = (
45
+ "ValidationOutcome",
46
+ "Validator",
47
+ "validate_sequence",
48
+ "GrammarContext",
49
+ "StructuralGrammarError",
50
+ "RepeatWindowError",
51
+ "MutationPreconditionError",
52
+ "TholClosureError",
53
+ "TransitionCompatibilityError",
54
+ "SequenceSyntaxError",
55
+ "SequenceValidationResult",
56
+ "apply_glyph_with_grammar",
57
+ "enforce_canonical_grammar",
58
+ "on_applied_glyph",
59
+ "record_grammar_violation",
60
+ "validate_window",
61
+ "run_validators",
62
+ "GRAPH_VALIDATORS",
63
+ "coerce_glyph",
64
+ "glyph_fallback",
65
+ "normalized_dnfr",
66
+ "get_norm",
67
+ "acceleration_norm",
68
+ "check_repeats",
69
+ "maybe_force",
70
+ "soft_grammar_filters",
71
+ "CANON_COMPAT",
72
+ "CANON_FALLBACK",
73
+ "GraphCanonicalValidator",
74
+ "apply_canonical_clamps",
75
+ "validate_canon",
76
+ "NFRValidator",
77
+ )