tnfr 4.5.2-py3-none-any.whl → 6.0.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- tnfr/__init__.py +228 -49
- tnfr/__init__.pyi +40 -0
- tnfr/_compat.py +11 -0
- tnfr/_version.py +7 -0
- tnfr/_version.pyi +7 -0
- tnfr/alias.py +106 -21
- tnfr/alias.pyi +140 -0
- tnfr/cache.py +666 -512
- tnfr/cache.pyi +232 -0
- tnfr/callback_utils.py +2 -9
- tnfr/callback_utils.pyi +105 -0
- tnfr/cli/__init__.py +21 -7
- tnfr/cli/__init__.pyi +47 -0
- tnfr/cli/arguments.py +42 -20
- tnfr/cli/arguments.pyi +33 -0
- tnfr/cli/execution.py +54 -20
- tnfr/cli/execution.pyi +80 -0
- tnfr/cli/utils.py +0 -2
- tnfr/cli/utils.pyi +8 -0
- tnfr/config/__init__.py +12 -0
- tnfr/config/__init__.pyi +8 -0
- tnfr/config/constants.py +104 -0
- tnfr/config/constants.pyi +12 -0
- tnfr/{config.py → config/init.py} +11 -7
- tnfr/config/init.pyi +8 -0
- tnfr/config/operator_names.py +106 -0
- tnfr/config/operator_names.pyi +28 -0
- tnfr/config/presets.py +104 -0
- tnfr/config/presets.pyi +7 -0
- tnfr/constants/__init__.py +78 -24
- tnfr/constants/__init__.pyi +104 -0
- tnfr/constants/core.py +1 -2
- tnfr/constants/core.pyi +17 -0
- tnfr/constants/init.pyi +12 -0
- tnfr/constants/metric.py +4 -12
- tnfr/constants/metric.pyi +19 -0
- tnfr/constants_glyphs.py +9 -91
- tnfr/constants_glyphs.pyi +12 -0
- tnfr/dynamics/__init__.py +112 -634
- tnfr/dynamics/__init__.pyi +83 -0
- tnfr/dynamics/adaptation.py +201 -0
- tnfr/dynamics/aliases.py +22 -0
- tnfr/dynamics/coordination.py +343 -0
- tnfr/dynamics/dnfr.py +1936 -354
- tnfr/dynamics/dnfr.pyi +33 -0
- tnfr/dynamics/integrators.py +369 -75
- tnfr/dynamics/integrators.pyi +35 -0
- tnfr/dynamics/runtime.py +521 -0
- tnfr/dynamics/sampling.py +8 -5
- tnfr/dynamics/sampling.pyi +7 -0
- tnfr/dynamics/selectors.py +680 -0
- tnfr/execution.py +56 -41
- tnfr/execution.pyi +65 -0
- tnfr/flatten.py +7 -7
- tnfr/flatten.pyi +28 -0
- tnfr/gamma.py +54 -37
- tnfr/gamma.pyi +40 -0
- tnfr/glyph_history.py +85 -38
- tnfr/glyph_history.pyi +53 -0
- tnfr/grammar.py +19 -338
- tnfr/grammar.pyi +13 -0
- tnfr/helpers/__init__.py +110 -30
- tnfr/helpers/__init__.pyi +66 -0
- tnfr/helpers/numeric.py +1 -0
- tnfr/helpers/numeric.pyi +12 -0
- tnfr/immutable.py +55 -19
- tnfr/immutable.pyi +37 -0
- tnfr/initialization.py +12 -10
- tnfr/initialization.pyi +73 -0
- tnfr/io.py +99 -34
- tnfr/io.pyi +11 -0
- tnfr/locking.pyi +7 -0
- tnfr/metrics/__init__.pyi +20 -0
- tnfr/metrics/coherence.py +934 -294
- tnfr/metrics/common.py +1 -3
- tnfr/metrics/common.pyi +15 -0
- tnfr/metrics/core.py +192 -34
- tnfr/metrics/core.pyi +13 -0
- tnfr/metrics/diagnosis.py +707 -101
- tnfr/metrics/diagnosis.pyi +89 -0
- tnfr/metrics/export.py +27 -13
- tnfr/metrics/glyph_timing.py +218 -38
- tnfr/metrics/reporting.py +22 -18
- tnfr/metrics/reporting.pyi +12 -0
- tnfr/metrics/sense_index.py +199 -25
- tnfr/metrics/sense_index.pyi +9 -0
- tnfr/metrics/trig.py +53 -18
- tnfr/metrics/trig.pyi +12 -0
- tnfr/metrics/trig_cache.py +3 -7
- tnfr/metrics/trig_cache.pyi +10 -0
- tnfr/node.py +148 -125
- tnfr/node.pyi +161 -0
- tnfr/observers.py +44 -30
- tnfr/observers.pyi +46 -0
- tnfr/ontosim.py +14 -13
- tnfr/ontosim.pyi +33 -0
- tnfr/operators/__init__.py +84 -52
- tnfr/operators/__init__.pyi +31 -0
- tnfr/operators/definitions.py +181 -0
- tnfr/operators/definitions.pyi +92 -0
- tnfr/operators/jitter.py +86 -23
- tnfr/operators/jitter.pyi +11 -0
- tnfr/operators/registry.py +80 -0
- tnfr/operators/registry.pyi +15 -0
- tnfr/operators/remesh.py +141 -57
- tnfr/presets.py +9 -54
- tnfr/presets.pyi +7 -0
- tnfr/py.typed +0 -0
- tnfr/rng.py +259 -73
- tnfr/rng.pyi +14 -0
- tnfr/selector.py +24 -17
- tnfr/selector.pyi +19 -0
- tnfr/sense.py +55 -43
- tnfr/sense.pyi +30 -0
- tnfr/structural.py +44 -267
- tnfr/structural.pyi +46 -0
- tnfr/telemetry/__init__.py +13 -0
- tnfr/telemetry/verbosity.py +37 -0
- tnfr/tokens.py +3 -2
- tnfr/tokens.pyi +41 -0
- tnfr/trace.py +272 -82
- tnfr/trace.pyi +68 -0
- tnfr/types.py +345 -6
- tnfr/types.pyi +145 -0
- tnfr/utils/__init__.py +158 -0
- tnfr/utils/__init__.pyi +133 -0
- tnfr/utils/cache.py +755 -0
- tnfr/utils/cache.pyi +156 -0
- tnfr/{collections_utils.py → utils/data.py} +57 -90
- tnfr/utils/data.pyi +73 -0
- tnfr/utils/graph.py +87 -0
- tnfr/utils/graph.pyi +10 -0
- tnfr/utils/init.py +746 -0
- tnfr/utils/init.pyi +85 -0
- tnfr/{json_utils.py → utils/io.py} +13 -18
- tnfr/utils/io.pyi +10 -0
- tnfr/utils/validators.py +130 -0
- tnfr/utils/validators.pyi +19 -0
- tnfr/validation/__init__.py +25 -0
- tnfr/validation/__init__.pyi +17 -0
- tnfr/validation/compatibility.py +59 -0
- tnfr/validation/compatibility.pyi +8 -0
- tnfr/validation/grammar.py +149 -0
- tnfr/validation/grammar.pyi +11 -0
- tnfr/validation/rules.py +194 -0
- tnfr/validation/rules.pyi +18 -0
- tnfr/validation/syntax.py +151 -0
- tnfr/validation/syntax.pyi +7 -0
- tnfr-6.0.0.dist-info/METADATA +135 -0
- tnfr-6.0.0.dist-info/RECORD +157 -0
- tnfr/graph_utils.py +0 -84
- tnfr/import_utils.py +0 -228
- tnfr/logging_utils.py +0 -116
- tnfr/validators.py +0 -84
- tnfr/value_utils.py +0 -59
- tnfr-4.5.2.dist-info/METADATA +0 -379
- tnfr-4.5.2.dist-info/RECORD +0 -67
- {tnfr-4.5.2.dist-info → tnfr-6.0.0.dist-info}/WHEEL +0 -0
- {tnfr-4.5.2.dist-info → tnfr-6.0.0.dist-info}/entry_points.txt +0 -0
- {tnfr-4.5.2.dist-info → tnfr-6.0.0.dist-info}/licenses/LICENSE.md +0 -0
- {tnfr-4.5.2.dist-info → tnfr-6.0.0.dist-info}/top_level.txt +0 -0
tnfr/utils/init.pyi
ADDED

@@ -0,0 +1,85 @@
+from collections.abc import Callable, Iterable, Iterator, Mapping
+import logging
+from typing import Any, Hashable, Literal
+
+__all__: tuple[str, ...]
+
+def __getattr__(name: str) -> Any: ...
+
+
+class WarnOnce:
+    def __init__(self, logger: logging.Logger, msg: str, *, maxsize: int = ...) -> None: ...
+    def __call__(
+        self,
+        data: Mapping[Hashable, Any] | Hashable,
+        value: Any | None = ...,
+    ) -> None: ...
+    def clear(self) -> None: ...
+
+
+class LazyImportProxy:
+    def __init__(
+        self,
+        module_name: str,
+        attr: str | None,
+        emit: Literal["warn", "log", "both"],
+        fallback: Any | None,
+    ) -> None: ...
+    def __getattr__(self, name: str) -> Any: ...
+    def __call__(self, *args: Any, **kwargs: Any) -> Any: ...
+    def __bool__(self) -> bool: ...
+    def __iter__(self) -> Iterator[Any]: ...
+    def resolve(self) -> Any: ...
+
+
+class ImportRegistry:
+    limit: int
+    failed: Mapping[str, None]
+    warned: set[str]
+    lock: Any
+
+    def record_failure(self, key: str, *, module: str | None = ...) -> None: ...
+    def discard(self, key: str) -> None: ...
+    def mark_warning(self, module: str) -> bool: ...
+    def clear(self) -> None: ...
+    def __contains__(self, key: str) -> bool: ...
+
+
+EMIT_MAP: Mapping[str, Callable[[str], None]]
+IMPORT_LOG: ImportRegistry
+_IMPORT_STATE: ImportRegistry
+_LOGGING_CONFIGURED: bool
+_DEFAULT_CACHE_SIZE: int
+_FAILED_IMPORT_LIMIT: int
+
+
+def _configure_root() -> None: ...
+def _reset_import_state() -> None: ...
+def _reset_logging_state() -> None: ...
+def _warn_failure(module: str, attr: str | None, err: Exception) -> None: ...
+
+
+def cached_import(
+    module_name: str,
+    attr: str | None = ...,
+    *,
+    fallback: Any | None = ...,
+    emit: Literal["warn", "log", "both"] = ...,
+    lazy: bool = ...,
+) -> Any | None: ...
+
+
+def warm_cached_import(
+    module: str | tuple[str, str | None] | Iterable[str | tuple[str, str | None]],
+    *extra: str | tuple[str, str | None],
+    attr: str | None = ...,
+    fallback: Any | None = ...,
+    emit: Literal["warn", "log", "both"] = ...,
+    lazy: bool = ...,
+    resolve: bool = ...,
+) -> Any | dict[str, Any | None]: ...
+def get_logger(name: str) -> logging.Logger: ...
+def get_numpy() -> Any | None: ...
+def get_nodenx() -> Any | None: ...
+def prune_failed_imports(*modules: str) -> None: ...
+def warn_once(logger: logging.Logger, msg: str, *, maxsize: int = ...) -> WarnOnce: ...
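The stub above only declares signatures. The sketch below shows how the import helpers could be exercised, inferred from those signatures and from the call site visible later in this diff (`from .init import cached_import, get_logger, warn_once` in `tnfr/utils/io.py`); behaviour on import failure is an assumption, not documented tnfr semantics.

```python
# Illustrative sketch only: usage inferred from the stub signatures above and
# from the imports in tnfr/utils/io.py; failure behaviour is an assumption.
from tnfr.utils.init import cached_import, get_logger, warn_once

logger = get_logger(__name__)

# Import numpy once and cache the result; fall back to None when unavailable
# instead of raising, emitting through the logging channel.
np = cached_import("numpy", fallback=None, emit="log")

# warn_once returns a WarnOnce callable that emits its message at most once
# per distinct key (bounded by maxsize).
warn_missing = warn_once(logger, "optional dependency unavailable: %s")
if np is None:
    warn_missing("numpy")
```
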
tnfr/{json_utils.py → utils/io.py}

@@ -1,11 +1,4 @@
-"""
-
-This module lazily imports :mod:`orjson` on first use of :func:`json_dumps`.
-The fast serializer is brought in through
-``tnfr.import_utils.cached_import``; its cache and failure registry can be
-reset using ``cached_import.cache_clear()`` and
-:func:`tnfr.import_utils.prune_failed_imports`.
-"""
+"""Serialisation helpers for TNFR, including JSON convenience wrappers."""
 
 from __future__ import annotations
 
@@ -13,8 +6,14 @@ from dataclasses import dataclass
 import json
 from typing import Any, Callable
 
-from .
-
+from .init import cached_import, get_logger, warn_once
+
+__all__ = (
+    "JsonDumpsParams",
+    "DEFAULT_PARAMS",
+    "json_dumps",
+    "clear_orjson_param_warnings",
+)
 
 _ORJSON_PARAMS_MSG = (
     "'ensure_ascii', 'separators', 'cls' and extra kwargs are ignored when using orjson: %s"
@@ -33,6 +32,7 @@ def clear_orjson_param_warnings() -> None:
 
 def _format_ignored_params(combo: frozenset[str]) -> str:
     """Return a stable representation for ignored parameter combinations."""
+
     return "{" + ", ".join(map(repr, sorted(combo))) + "}"
 
 
@@ -91,6 +91,7 @@ def _json_dumps_std(
     **kwargs: Any,
 ) -> bytes | str:
     """Serialize using the standard library :func:`json.dumps`."""
+
     result = json.dumps(
         obj,
         sort_keys=params.sort_keys,
@@ -114,14 +115,8 @@ def json_dumps(
     to_bytes: bool = False,
     **kwargs: Any,
 ) -> bytes | str:
-    """Serialize ``obj`` to JSON using ``orjson`` when available.
-
-    Returns a ``str`` by default. Pass ``to_bytes=True`` to obtain a ``bytes``
-    result. When :mod:`orjson` is used, the ``ensure_ascii``, ``separators``,
-    ``cls`` and any additional keyword arguments are ignored because they are
-    not supported by :func:`orjson.dumps`. A warning is emitted whenever such
-    ignored parameters are detected.
-    """
+    """Serialize ``obj`` to JSON using ``orjson`` when available."""
+
     if not isinstance(sort_keys, bool):
         raise TypeError("sort_keys must be a boolean")
     if default is not None and not callable(default):
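The docstring removed in the last hunk still describes the retained behaviour (the warning message constant is kept): `json_dumps` returns a `str` by default, returns `bytes` with `to_bytes=True`, and under orjson ignores `ensure_ascii`, `separators`, `cls` and extra kwargs while warning once per ignored-parameter combination. A hedged usage sketch:

```python
# Usage sketch based on the json_dumps signature and the removed docstring;
# the import path reflects the json_utils.py → utils/io.py rename above.
from tnfr.utils.io import json_dumps, clear_orjson_param_warnings

payload = {"b": 2, "a": 1}

text = json_dumps(payload, sort_keys=True)   # str by default
raw = json_dumps(payload, to_bytes=True)     # bytes when requested

# Under orjson, ensure_ascii/separators/cls and extra kwargs are ignored and a
# warning is emitted once per ignored-parameter combination.
text = json_dumps(payload, ensure_ascii=False)

# Reset the de-duplicated warnings, e.g. between test cases.
clear_orjson_param_warnings()
```
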
tnfr/utils/io.pyi
ADDED
tnfr/utils/validators.py
ADDED

@@ -0,0 +1,130 @@
+"""Validation utilities for TNFR graph structures."""
+
+from __future__ import annotations
+
+import numbers
+import sys
+
+from collections.abc import Mapping
+from typing import Callable, Sequence
+
+from .._compat import TypeAlias
+
+from ..alias import get_attr
+from ..config.constants import GLYPHS_CANONICAL_SET
+from ..constants import get_aliases, get_param
+from ..helpers.numeric import within_range
+from ..types import (
+    EPIValue,
+    NodeId,
+    StructuralFrequency,
+    TNFRGraph,
+)
+ALIAS_EPI = get_aliases("EPI")
+ALIAS_VF = get_aliases("VF")
+
+ValidatorFunc: TypeAlias = Callable[[TNFRGraph], None]
+"""Callable signature expected for validation routines."""
+
+NodeData = Mapping[str, object]
+"""Read-only node attribute mapping used by validators."""
+
+AliasSequence = Sequence[str]
+"""Sequence of accepted attribute aliases."""
+
+__all__ = ("validate_window", "run_validators")
+
+
+def validate_window(window: int, *, positive: bool = False) -> int:
+    """Validate ``window`` as an ``int`` and return it.
+
+    Non-integer values raise :class:`TypeError`. When ``positive`` is ``True``
+    the value must be strictly greater than zero; otherwise it may be zero.
+    Negative values always raise :class:`ValueError`.
+    """
+
+    if isinstance(window, bool) or not isinstance(window, numbers.Integral):
+        raise TypeError("'window' must be an integer")
+    if window < 0 or (positive and window == 0):
+        kind = "positive" if positive else "non-negative"
+        raise ValueError(f"'window'={window} must be {kind}")
+    return int(window)
+
+
+def _require_attr(data: NodeData, alias: AliasSequence, node: NodeId, name: str) -> float:
+    """Return attribute value or raise if missing."""
+
+    mapping: dict[str, object]
+    if isinstance(data, dict):
+        mapping = data
+    else:
+        mapping = dict(data)
+    val = get_attr(mapping, alias, None)
+    if val is None:
+        raise ValueError(f"Missing {name} attribute in node {node}")
+    return float(val)
+
+
+def _validate_sigma(graph: TNFRGraph) -> None:
+    from ..sense import sigma_vector_from_graph
+
+    sv = sigma_vector_from_graph(graph)
+    if sv.get("mag", 0.0) > 1.0 + sys.float_info.epsilon:
+        raise ValueError("σ norm exceeds 1")
+
+
+GRAPH_VALIDATORS: tuple[ValidatorFunc, ...] = (_validate_sigma,)
+"""Ordered collection of graph-level validators."""
+
+
+def _check_epi_vf(
+    epi: EPIValue,
+    vf: StructuralFrequency,
+    epi_min: float,
+    epi_max: float,
+    vf_min: float,
+    vf_max: float,
+    node: NodeId,
+) -> None:
+    _check_range(epi, epi_min, epi_max, "EPI", node)
+    _check_range(vf, vf_min, vf_max, "VF", node)
+
+
+def _out_of_range_msg(name: str, node: NodeId, val: float) -> str:
+    return f"{name} out of range in node {node}: {val}"
+
+
+def _check_range(
+    val: float,
+    lower: float,
+    upper: float,
+    name: str,
+    node: NodeId,
+    tol: float = 1e-9,
+) -> None:
+    if not within_range(val, lower, upper, tol):
+        raise ValueError(_out_of_range_msg(name, node, val))
+
+
+def _check_glyph(glyph: str | None, node: NodeId) -> None:
+    if glyph and glyph not in GLYPHS_CANONICAL_SET:
+        raise KeyError(f"Invalid glyph {glyph} in node {node}")
+
+
+def run_validators(graph: TNFRGraph) -> None:
+    """Run all invariant validators on ``G`` with a single node pass."""
+    from ..glyph_history import last_glyph
+
+    epi_min = float(get_param(graph, "EPI_MIN"))
+    epi_max = float(get_param(graph, "EPI_MAX"))
+    vf_min = float(get_param(graph, "VF_MIN"))
+    vf_max = float(get_param(graph, "VF_MAX"))
+
+    for node, data in graph.nodes(data=True):
+        epi = EPIValue(_require_attr(data, ALIAS_EPI, node, "EPI"))
+        vf = StructuralFrequency(_require_attr(data, ALIAS_VF, node, "VF"))
+        _check_epi_vf(epi, vf, epi_min, epi_max, vf_min, vf_max, node)
+        _check_glyph(last_glyph(data), node)
+
+    for validator in GRAPH_VALIDATORS:
+        validator(graph)
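A short sketch of the two public entry points added above. The checks shown for `validate_window` follow its docstring; the graph setup required by `run_validators` (EPI/VF node attributes plus the `EPI_MIN`/`EPI_MAX`/`VF_MIN`/`VF_MAX` parameters) is omitted, so that call is left commented out.

```python
# Sketch of the public validators added in tnfr/utils/validators.py.
from tnfr.utils.validators import validate_window, run_validators

assert validate_window(5) == 5
assert validate_window(0) == 0          # zero is allowed unless positive=True

try:
    validate_window(0, positive=True)   # strictly positive required
except ValueError as exc:
    print(exc)                          # 'window'=0 must be positive

try:
    validate_window(2.5)                # non-integers are rejected
except TypeError as exc:
    print(exc)                          # 'window' must be an integer

# run_validators(G)  # single node pass, then graph-level checks (σ norm ≤ 1)
```
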
tnfr/utils/validators.pyi
ADDED

@@ -0,0 +1,19 @@
+from collections.abc import Mapping
+from typing import Callable, Sequence
+
+from ..types import TNFRGraph
+from .._compat import TypeAlias
+
+ValidatorFunc: TypeAlias = Callable[[TNFRGraph], None]
+NodeData: TypeAlias = Mapping[str, object]
+AliasSequence: TypeAlias = Sequence[str]
+
+__all__: tuple[str, ...]
+
+def __getattr__(name: str) -> object: ...
+
+def validate_window(window: int, *, positive: bool = ...) -> int: ...
+
+def run_validators(graph: TNFRGraph) -> None: ...
+
+GRAPH_VALIDATORS: tuple[ValidatorFunc, ...]
tnfr/validation/__init__.py
ADDED

@@ -0,0 +1,25 @@
+"""Validation helpers for TNFR sequences."""
+
+from .compatibility import CANON_COMPAT, CANON_FALLBACK
+from .grammar import (
+    GrammarContext,
+    apply_glyph_with_grammar,
+    enforce_canonical_grammar,
+    on_applied_glyph,
+)
+from .rules import coerce_glyph, glyph_fallback, normalized_dnfr, get_norm
+from .syntax import validate_sequence
+
+__all__ = (
+    "validate_sequence",
+    "GrammarContext",
+    "apply_glyph_with_grammar",
+    "enforce_canonical_grammar",
+    "on_applied_glyph",
+    "coerce_glyph",
+    "glyph_fallback",
+    "normalized_dnfr",
+    "get_norm",
+    "CANON_COMPAT",
+    "CANON_FALLBACK",
+)
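Since the package facade re-exports these names, downstream code can import them from `tnfr.validation` directly rather than from the individual submodules:

```python
# Equivalent imports through the tnfr.validation facade shown above.
from tnfr.validation import (
    CANON_COMPAT,
    GrammarContext,
    enforce_canonical_grammar,
    validate_sequence,
)
```
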
tnfr/validation/__init__.pyi
ADDED

@@ -0,0 +1,17 @@
+from typing import Any
+
+__all__: Any
+
+def __getattr__(name: str) -> Any: ...
+
+CANON_COMPAT: Any
+CANON_FALLBACK: Any
+GrammarContext: Any
+apply_glyph_with_grammar: Any
+coerce_glyph: Any
+enforce_canonical_grammar: Any
+get_norm: Any
+glyph_fallback: Any
+normalized_dnfr: Any
+on_applied_glyph: Any
+validate_sequence: Any
tnfr/validation/compatibility.py
ADDED

@@ -0,0 +1,59 @@
+"""Canonical TNFR compatibility tables.
+
+This module centralises the canonical transitions that keep TNFR
+coherence consistent across refactors. It is intentionally lightweight
+so :mod:`tnfr.validation.grammar` can depend on it without introducing
+cycles.
+"""
+
+from __future__ import annotations
+
+from ..types import Glyph
+
+__all__ = ["CANON_COMPAT", "CANON_FALLBACK"]
+
+# Canonical compatibilities (allowed next glyphs)
+CANON_COMPAT: dict[Glyph, set[Glyph]] = {
+    # Opening / initiation
+    Glyph.AL: {Glyph.EN, Glyph.RA, Glyph.NAV, Glyph.VAL, Glyph.UM},
+    Glyph.EN: {Glyph.IL, Glyph.UM, Glyph.RA, Glyph.NAV},
+    # Stabilisation / diffusion / coupling
+    Glyph.IL: {Glyph.RA, Glyph.VAL, Glyph.UM, Glyph.SHA},
+    Glyph.UM: {Glyph.RA, Glyph.IL, Glyph.VAL, Glyph.NAV},
+    Glyph.RA: {Glyph.IL, Glyph.VAL, Glyph.UM, Glyph.NAV},
+    Glyph.VAL: {Glyph.UM, Glyph.RA, Glyph.IL, Glyph.NAV},
+    # Dissonance → transition → mutation
+    Glyph.OZ: {Glyph.ZHIR, Glyph.NAV},
+    Glyph.ZHIR: {Glyph.IL, Glyph.NAV},
+    Glyph.NAV: {Glyph.OZ, Glyph.ZHIR, Glyph.RA, Glyph.IL, Glyph.UM},
+    # Closures / latent states
+    Glyph.SHA: {Glyph.AL, Glyph.EN},
+    Glyph.NUL: {Glyph.AL, Glyph.IL},
+    # Self-organising blocks
+    Glyph.THOL: {
+        Glyph.OZ,
+        Glyph.ZHIR,
+        Glyph.NAV,
+        Glyph.RA,
+        Glyph.IL,
+        Glyph.UM,
+        Glyph.SHA,
+        Glyph.NUL,
+    },
+}
+
+# Canonical fallbacks when a transition is not allowed
+CANON_FALLBACK: dict[Glyph, Glyph] = {
+    Glyph.AL: Glyph.EN,
+    Glyph.EN: Glyph.IL,
+    Glyph.IL: Glyph.RA,
+    Glyph.NAV: Glyph.RA,
+    Glyph.NUL: Glyph.AL,
+    Glyph.OZ: Glyph.ZHIR,
+    Glyph.RA: Glyph.IL,
+    Glyph.SHA: Glyph.AL,
+    Glyph.THOL: Glyph.NAV,
+    Glyph.UM: Glyph.RA,
+    Glyph.VAL: Glyph.RA,
+    Glyph.ZHIR: Glyph.IL,
+}
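As a reading aid, the hypothetical helper below shows how the two tables combine: a candidate glyph is kept when the current glyph's compatibility set allows it, and otherwise replaced by the current glyph's canonical fallback. The helper itself is not part of tnfr; the shipped checks go through `tnfr.validation.rules` and `tnfr.validation.grammar`.

```python
# Hypothetical helper, not part of tnfr: illustrates how CANON_COMPAT and
# CANON_FALLBACK combine.
from tnfr.types import Glyph
from tnfr.validation.compatibility import CANON_COMPAT, CANON_FALLBACK

def canonical_next(current: Glyph, candidate: Glyph) -> Glyph:
    """Keep ``candidate`` if allowed after ``current``; otherwise fall back."""
    allowed = CANON_COMPAT.get(current, set())
    return candidate if candidate in allowed else CANON_FALLBACK[current]

# AL → EN is an allowed transition; AL → ZHIR is not, so it falls back to EN.
assert canonical_next(Glyph.AL, Glyph.EN) == Glyph.EN
assert canonical_next(Glyph.AL, Glyph.ZHIR) == Glyph.EN
```
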
tnfr/validation/grammar.py
ADDED

@@ -0,0 +1,149 @@
+"""Canonical grammar enforcement utilities."""
+
+from __future__ import annotations
+
+from collections.abc import Iterable
+from dataclasses import dataclass
+from typing import Any, Optional, TYPE_CHECKING, cast
+
+from ..constants import DEFAULTS, get_param
+from ..operators import apply_glyph
+from ..types import Glyph, NodeId, TNFRGraph
+from .compatibility import CANON_COMPAT
+from . import rules as _rules
+
+if TYPE_CHECKING:  # pragma: no cover - typing only
+    from ..node import NodeProtocol
+
+__all__ = [
+    "GrammarContext",
+    "_gram_state",
+    "enforce_canonical_grammar",
+    "on_applied_glyph",
+    "apply_glyph_with_grammar",
+]
+
+
+@dataclass
+class GrammarContext:
+    """Shared context for grammar helpers.
+
+    Collects graph-level settings to reduce positional parameters across
+    helper functions.
+    """
+
+    G: TNFRGraph
+    cfg_soft: dict[str, Any]
+    cfg_canon: dict[str, Any]
+    norms: dict[str, Any]
+
+    @classmethod
+    def from_graph(cls, G: TNFRGraph) -> "GrammarContext":
+        """Create a :class:`GrammarContext` for ``G``."""
+
+        return cls(
+            G=G,
+            cfg_soft=G.graph.get("GRAMMAR", DEFAULTS.get("GRAMMAR", {})),
+            cfg_canon=G.graph.get(
+                "GRAMMAR_CANON", DEFAULTS.get("GRAMMAR_CANON", {})
+            ),
+            norms=G.graph.get("_sel_norms") or {},
+        )
+
+
+# -------------------------
+# Per-node grammar state
+# -------------------------
+
+def _gram_state(nd: dict[str, Any]) -> dict[str, Any]:
+    """Create or return the node grammar state."""
+
+    return nd.setdefault("_GRAM", {"thol_open": False, "thol_len": 0})
+
+
+# -------------------------
+# Core: enforce grammar on a candidate
+# -------------------------
+
+def enforce_canonical_grammar(
+    G: TNFRGraph,
+    n: NodeId,
+    cand: Glyph | str,
+    ctx: Optional[GrammarContext] = None,
+) -> Glyph | str:
+    """Validate and adjust a candidate glyph according to canonical grammar."""
+
+    if ctx is None:
+        ctx = GrammarContext.from_graph(G)
+
+    nd = ctx.G.nodes[n]
+    st = _gram_state(nd)
+
+    raw_cand = cand
+    cand = _rules.coerce_glyph(cand)
+    input_was_str = isinstance(raw_cand, str)
+
+    # 0) If glyphs outside the alphabet arrive, leave untouched
+    if not isinstance(cand, Glyph) or cand not in CANON_COMPAT:
+        return raw_cand if input_was_str else cand
+
+    original = cand
+    cand = _rules._check_repeats(ctx, n, cand)
+
+    cand = _rules._maybe_force(ctx, n, cand, original, _rules.normalized_dnfr, "force_dnfr")
+    cand = _rules._maybe_force(ctx, n, cand, original, _rules._accel_norm, "force_accel")
+    cand = _rules._check_oz_to_zhir(ctx, n, cand)
+    cand = _rules._check_thol_closure(ctx, n, cand, st)
+    cand = _rules._check_compatibility(ctx, n, cand)
+
+    coerced_final = _rules.coerce_glyph(cand)
+    if input_was_str:
+        if isinstance(coerced_final, Glyph):
+            return coerced_final.value
+        return str(cand)
+    return coerced_final if isinstance(coerced_final, Glyph) else cand
+
+
+# -------------------------
+# Post-selection: update grammar state
+# -------------------------
+
+def on_applied_glyph(G: TNFRGraph, n: NodeId, applied: Glyph | str) -> None:
+    nd = G.nodes[n]
+    st = _gram_state(nd)
+    try:
+        glyph = applied if isinstance(applied, Glyph) else Glyph(str(applied))
+    except ValueError:
+        glyph = None
+
+    if glyph is Glyph.THOL:
+        st["thol_open"] = True
+        st["thol_len"] = 0
+    elif glyph in (Glyph.SHA, Glyph.NUL):
+        st["thol_open"] = False
+        st["thol_len"] = 0
+
+
+# -------------------------
+# Direct application with canonical grammar
+# -------------------------
+
+def apply_glyph_with_grammar(
+    G: TNFRGraph,
+    nodes: Optional[Iterable[NodeId | "NodeProtocol"]],
+    glyph: Glyph | str,
+    window: Optional[int] = None,
+) -> None:
+    """Apply ``glyph`` to ``nodes`` enforcing the canonical grammar."""
+
+    if window is None:
+        window = get_param(G, "GLYPH_HYSTERESIS_WINDOW")
+
+    g_str = glyph.value if isinstance(glyph, Glyph) else str(glyph)
+    iter_nodes = G.nodes() if nodes is None else nodes
+    ctx = GrammarContext.from_graph(G)
+    for node_ref in iter_nodes:
+        node_id = cast(NodeId, getattr(node_ref, "n", node_ref))
+        g_eff = enforce_canonical_grammar(G, node_id, g_str, ctx)
+        apply_glyph(G, node_id, g_eff, window=window)
+        on_applied_glyph(G, node_id, g_eff)
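A minimal sketch of the per-node grammar state tracked above, assuming `TNFRGraph` is networkx-compatible (the module accesses `G.graph` and `G.nodes[n]` throughout). `apply_glyph_with_grammar` additionally needs the operator machinery and node parameters, so it is only indicated in a comment.

```python
# Minimal sketch, assuming a networkx-compatible TNFRGraph; the "_GRAM" node
# key and its fields come from _gram_state in the hunk above.
import networkx as nx

from tnfr.types import Glyph
from tnfr.validation.grammar import on_applied_glyph

G = nx.Graph()
G.add_node("n0")

# Opening a THOL block marks the node's grammar state; SHA or NUL closes it.
on_applied_glyph(G, "n0", Glyph.THOL)
assert G.nodes["n0"]["_GRAM"]["thol_open"] is True

on_applied_glyph(G, "n0", Glyph.SHA)
assert G.nodes["n0"]["_GRAM"]["thol_open"] is False

# Applying a glyph across nodes with canonical-grammar enforcement:
# apply_glyph_with_grammar(G, None, Glyph.RA)   # nodes=None → every node in G
```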