tnfr 4.5.1__py3-none-any.whl → 6.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- tnfr/__init__.py +270 -90
- tnfr/__init__.pyi +40 -0
- tnfr/_compat.py +11 -0
- tnfr/_version.py +7 -0
- tnfr/_version.pyi +7 -0
- tnfr/alias.py +631 -0
- tnfr/alias.pyi +140 -0
- tnfr/cache.py +732 -0
- tnfr/cache.pyi +232 -0
- tnfr/callback_utils.py +381 -0
- tnfr/callback_utils.pyi +105 -0
- tnfr/cli/__init__.py +89 -0
- tnfr/cli/__init__.pyi +47 -0
- tnfr/cli/arguments.py +199 -0
- tnfr/cli/arguments.pyi +33 -0
- tnfr/cli/execution.py +322 -0
- tnfr/cli/execution.pyi +80 -0
- tnfr/cli/utils.py +34 -0
- tnfr/cli/utils.pyi +8 -0
- tnfr/config/__init__.py +12 -0
- tnfr/config/__init__.pyi +8 -0
- tnfr/config/constants.py +104 -0
- tnfr/config/constants.pyi +12 -0
- tnfr/config/init.py +36 -0
- tnfr/config/init.pyi +8 -0
- tnfr/config/operator_names.py +106 -0
- tnfr/config/operator_names.pyi +28 -0
- tnfr/config/presets.py +104 -0
- tnfr/config/presets.pyi +7 -0
- tnfr/constants/__init__.py +228 -0
- tnfr/constants/__init__.pyi +104 -0
- tnfr/constants/core.py +158 -0
- tnfr/constants/core.pyi +17 -0
- tnfr/constants/init.py +31 -0
- tnfr/constants/init.pyi +12 -0
- tnfr/constants/metric.py +102 -0
- tnfr/constants/metric.pyi +19 -0
- tnfr/constants_glyphs.py +16 -0
- tnfr/constants_glyphs.pyi +12 -0
- tnfr/dynamics/__init__.py +136 -0
- tnfr/dynamics/__init__.pyi +83 -0
- tnfr/dynamics/adaptation.py +201 -0
- tnfr/dynamics/aliases.py +22 -0
- tnfr/dynamics/coordination.py +343 -0
- tnfr/dynamics/dnfr.py +2315 -0
- tnfr/dynamics/dnfr.pyi +33 -0
- tnfr/dynamics/integrators.py +561 -0
- tnfr/dynamics/integrators.pyi +35 -0
- tnfr/dynamics/runtime.py +521 -0
- tnfr/dynamics/sampling.py +34 -0
- tnfr/dynamics/sampling.pyi +7 -0
- tnfr/dynamics/selectors.py +680 -0
- tnfr/execution.py +216 -0
- tnfr/execution.pyi +65 -0
- tnfr/flatten.py +283 -0
- tnfr/flatten.pyi +28 -0
- tnfr/gamma.py +320 -89
- tnfr/gamma.pyi +40 -0
- tnfr/glyph_history.py +337 -0
- tnfr/glyph_history.pyi +53 -0
- tnfr/grammar.py +23 -153
- tnfr/grammar.pyi +13 -0
- tnfr/helpers/__init__.py +151 -0
- tnfr/helpers/__init__.pyi +66 -0
- tnfr/helpers/numeric.py +88 -0
- tnfr/helpers/numeric.pyi +12 -0
- tnfr/immutable.py +214 -0
- tnfr/immutable.pyi +37 -0
- tnfr/initialization.py +199 -0
- tnfr/initialization.pyi +73 -0
- tnfr/io.py +311 -0
- tnfr/io.pyi +11 -0
- tnfr/locking.py +37 -0
- tnfr/locking.pyi +7 -0
- tnfr/metrics/__init__.py +41 -0
- tnfr/metrics/__init__.pyi +20 -0
- tnfr/metrics/coherence.py +1469 -0
- tnfr/metrics/common.py +149 -0
- tnfr/metrics/common.pyi +15 -0
- tnfr/metrics/core.py +259 -0
- tnfr/metrics/core.pyi +13 -0
- tnfr/metrics/diagnosis.py +840 -0
- tnfr/metrics/diagnosis.pyi +89 -0
- tnfr/metrics/export.py +151 -0
- tnfr/metrics/glyph_timing.py +369 -0
- tnfr/metrics/reporting.py +152 -0
- tnfr/metrics/reporting.pyi +12 -0
- tnfr/metrics/sense_index.py +294 -0
- tnfr/metrics/sense_index.pyi +9 -0
- tnfr/metrics/trig.py +216 -0
- tnfr/metrics/trig.pyi +12 -0
- tnfr/metrics/trig_cache.py +105 -0
- tnfr/metrics/trig_cache.pyi +10 -0
- tnfr/node.py +255 -177
- tnfr/node.pyi +161 -0
- tnfr/observers.py +154 -150
- tnfr/observers.pyi +46 -0
- tnfr/ontosim.py +135 -134
- tnfr/ontosim.pyi +33 -0
- tnfr/operators/__init__.py +452 -0
- tnfr/operators/__init__.pyi +31 -0
- tnfr/operators/definitions.py +181 -0
- tnfr/operators/definitions.pyi +92 -0
- tnfr/operators/jitter.py +266 -0
- tnfr/operators/jitter.pyi +11 -0
- tnfr/operators/registry.py +80 -0
- tnfr/operators/registry.pyi +15 -0
- tnfr/operators/remesh.py +569 -0
- tnfr/presets.py +10 -23
- tnfr/presets.pyi +7 -0
- tnfr/py.typed +0 -0
- tnfr/rng.py +440 -0
- tnfr/rng.pyi +14 -0
- tnfr/selector.py +217 -0
- tnfr/selector.pyi +19 -0
- tnfr/sense.py +307 -142
- tnfr/sense.pyi +30 -0
- tnfr/structural.py +69 -164
- tnfr/structural.pyi +46 -0
- tnfr/telemetry/__init__.py +13 -0
- tnfr/telemetry/verbosity.py +37 -0
- tnfr/tokens.py +61 -0
- tnfr/tokens.pyi +41 -0
- tnfr/trace.py +520 -95
- tnfr/trace.pyi +68 -0
- tnfr/types.py +382 -17
- tnfr/types.pyi +145 -0
- tnfr/utils/__init__.py +158 -0
- tnfr/utils/__init__.pyi +133 -0
- tnfr/utils/cache.py +755 -0
- tnfr/utils/cache.pyi +156 -0
- tnfr/utils/data.py +267 -0
- tnfr/utils/data.pyi +73 -0
- tnfr/utils/graph.py +87 -0
- tnfr/utils/graph.pyi +10 -0
- tnfr/utils/init.py +746 -0
- tnfr/utils/init.pyi +85 -0
- tnfr/utils/io.py +157 -0
- tnfr/utils/io.pyi +10 -0
- tnfr/utils/validators.py +130 -0
- tnfr/utils/validators.pyi +19 -0
- tnfr/validation/__init__.py +25 -0
- tnfr/validation/__init__.pyi +17 -0
- tnfr/validation/compatibility.py +59 -0
- tnfr/validation/compatibility.pyi +8 -0
- tnfr/validation/grammar.py +149 -0
- tnfr/validation/grammar.pyi +11 -0
- tnfr/validation/rules.py +194 -0
- tnfr/validation/rules.pyi +18 -0
- tnfr/validation/syntax.py +151 -0
- tnfr/validation/syntax.pyi +7 -0
- tnfr-6.0.0.dist-info/METADATA +135 -0
- tnfr-6.0.0.dist-info/RECORD +157 -0
- tnfr/cli.py +0 -322
- tnfr/config.py +0 -41
- tnfr/constants.py +0 -277
- tnfr/dynamics.py +0 -814
- tnfr/helpers.py +0 -264
- tnfr/main.py +0 -47
- tnfr/metrics.py +0 -597
- tnfr/operators.py +0 -525
- tnfr/program.py +0 -176
- tnfr/scenarios.py +0 -34
- tnfr/validators.py +0 -38
- tnfr-4.5.1.dist-info/METADATA +0 -221
- tnfr-4.5.1.dist-info/RECORD +0 -28
- {tnfr-4.5.1.dist-info → tnfr-6.0.0.dist-info}/WHEEL +0 -0
- {tnfr-4.5.1.dist-info → tnfr-6.0.0.dist-info}/entry_points.txt +0 -0
- {tnfr-4.5.1.dist-info → tnfr-6.0.0.dist-info}/licenses/LICENSE.md +0 -0
- {tnfr-4.5.1.dist-info → tnfr-6.0.0.dist-info}/top_level.txt +0 -0
tnfr/utils/init.pyi
ADDED
|
@@ -0,0 +1,85 @@
|
|
|
1
|
+
from collections.abc import Callable, Iterable, Iterator, Mapping
|
|
2
|
+
import logging
|
|
3
|
+
from typing import Any, Hashable, Literal
|
|
4
|
+
|
|
5
|
+
__all__: tuple[str, ...]
|
|
6
|
+
|
|
7
|
+
def __getattr__(name: str) -> Any: ...
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
class WarnOnce:
|
|
11
|
+
def __init__(self, logger: logging.Logger, msg: str, *, maxsize: int = ...) -> None: ...
|
|
12
|
+
def __call__(
|
|
13
|
+
self,
|
|
14
|
+
data: Mapping[Hashable, Any] | Hashable,
|
|
15
|
+
value: Any | None = ...,
|
|
16
|
+
) -> None: ...
|
|
17
|
+
def clear(self) -> None: ...
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
class LazyImportProxy:
|
|
21
|
+
def __init__(
|
|
22
|
+
self,
|
|
23
|
+
module_name: str,
|
|
24
|
+
attr: str | None,
|
|
25
|
+
emit: Literal["warn", "log", "both"],
|
|
26
|
+
fallback: Any | None,
|
|
27
|
+
) -> None: ...
|
|
28
|
+
def __getattr__(self, name: str) -> Any: ...
|
|
29
|
+
def __call__(self, *args: Any, **kwargs: Any) -> Any: ...
|
|
30
|
+
def __bool__(self) -> bool: ...
|
|
31
|
+
def __iter__(self) -> Iterator[Any]: ...
|
|
32
|
+
def resolve(self) -> Any: ...
|
|
33
|
+
|
|
34
|
+
|
|
35
|
+
class ImportRegistry:
|
|
36
|
+
limit: int
|
|
37
|
+
failed: Mapping[str, None]
|
|
38
|
+
warned: set[str]
|
|
39
|
+
lock: Any
|
|
40
|
+
|
|
41
|
+
def record_failure(self, key: str, *, module: str | None = ...) -> None: ...
|
|
42
|
+
def discard(self, key: str) -> None: ...
|
|
43
|
+
def mark_warning(self, module: str) -> bool: ...
|
|
44
|
+
def clear(self) -> None: ...
|
|
45
|
+
def __contains__(self, key: str) -> bool: ...
|
|
46
|
+
|
|
47
|
+
|
|
48
|
+
EMIT_MAP: Mapping[str, Callable[[str], None]]
|
|
49
|
+
IMPORT_LOG: ImportRegistry
|
|
50
|
+
_IMPORT_STATE: ImportRegistry
|
|
51
|
+
_LOGGING_CONFIGURED: bool
|
|
52
|
+
_DEFAULT_CACHE_SIZE: int
|
|
53
|
+
_FAILED_IMPORT_LIMIT: int
|
|
54
|
+
|
|
55
|
+
|
|
56
|
+
def _configure_root() -> None: ...
|
|
57
|
+
def _reset_import_state() -> None: ...
|
|
58
|
+
def _reset_logging_state() -> None: ...
|
|
59
|
+
def _warn_failure(module: str, attr: str | None, err: Exception) -> None: ...
|
|
60
|
+
|
|
61
|
+
|
|
62
|
+
def cached_import(
|
|
63
|
+
module_name: str,
|
|
64
|
+
attr: str | None = ...,
|
|
65
|
+
*,
|
|
66
|
+
fallback: Any | None = ...,
|
|
67
|
+
emit: Literal["warn", "log", "both"] = ...,
|
|
68
|
+
lazy: bool = ...,
|
|
69
|
+
) -> Any | None: ...
|
|
70
|
+
|
|
71
|
+
|
|
72
|
+
def warm_cached_import(
|
|
73
|
+
module: str | tuple[str, str | None] | Iterable[str | tuple[str, str | None]],
|
|
74
|
+
*extra: str | tuple[str, str | None],
|
|
75
|
+
attr: str | None = ...,
|
|
76
|
+
fallback: Any | None = ...,
|
|
77
|
+
emit: Literal["warn", "log", "both"] = ...,
|
|
78
|
+
lazy: bool = ...,
|
|
79
|
+
resolve: bool = ...,
|
|
80
|
+
) -> Any | dict[str, Any | None]: ...
|
|
81
|
+
def get_logger(name: str) -> logging.Logger: ...
|
|
82
|
+
def get_numpy() -> Any | None: ...
|
|
83
|
+
def get_nodenx() -> Any | None: ...
|
|
84
|
+
def prune_failed_imports(*modules: str) -> None: ...
|
|
85
|
+
def warn_once(logger: logging.Logger, msg: str, *, maxsize: int = ...) -> WarnOnce: ...
|
tnfr/utils/io.py
ADDED
|
@@ -0,0 +1,157 @@
|
|
|
1
|
+
"""Serialisation helpers for TNFR, including JSON convenience wrappers."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from dataclasses import dataclass
|
|
6
|
+
import json
|
|
7
|
+
from typing import Any, Callable
|
|
8
|
+
|
|
9
|
+
from .init import cached_import, get_logger, warn_once
|
|
10
|
+
|
|
11
|
+
__all__ = (
|
|
12
|
+
"JsonDumpsParams",
|
|
13
|
+
"DEFAULT_PARAMS",
|
|
14
|
+
"json_dumps",
|
|
15
|
+
"clear_orjson_param_warnings",
|
|
16
|
+
)
|
|
17
|
+
|
|
18
|
+
_ORJSON_PARAMS_MSG = (
|
|
19
|
+
"'ensure_ascii', 'separators', 'cls' and extra kwargs are ignored when using orjson: %s"
|
|
20
|
+
)
|
|
21
|
+
|
|
22
|
+
logger = get_logger(__name__)
|
|
23
|
+
|
|
24
|
+
_warn_ignored_params_once = warn_once(logger, _ORJSON_PARAMS_MSG)
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
def clear_orjson_param_warnings() -> None:
    """Reset cached warnings for ignored :mod:`orjson` parameters."""

    # After this call the "ignored parameters" warning can fire again even
    # for parameter combinations that were already reported once.
    _warn_ignored_params_once.clear()
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
def _format_ignored_params(combo: frozenset[str]) -> str:
|
|
34
|
+
"""Return a stable representation for ignored parameter combinations."""
|
|
35
|
+
|
|
36
|
+
return "{" + ", ".join(map(repr, sorted(combo))) + "}"
|
|
37
|
+
|
|
38
|
+
|
|
39
|
+
@dataclass(frozen=True)
class JsonDumpsParams:
    """Container describing the parameters used by :func:`json_dumps`."""

    sort_keys: bool = False  # sort object keys in the output
    default: Callable[[Any], Any] | None = None  # fallback serializer for unknown types
    ensure_ascii: bool = True  # escape non-ASCII chars (honoured by stdlib backend only)
    separators: tuple[str, str] = (",", ":")  # (item, key) separators (stdlib backend only)
    cls: type[json.JSONEncoder] | None = None  # custom encoder class (stdlib backend only)
    to_bytes: bool = False  # return ``bytes`` instead of ``str``
|
|
49
|
+
|
|
50
|
+
|
|
51
|
+
DEFAULT_PARAMS = JsonDumpsParams()
|
|
52
|
+
|
|
53
|
+
|
|
54
|
+
def _collect_ignored_params(
|
|
55
|
+
params: JsonDumpsParams, extra_kwargs: dict[str, Any]
|
|
56
|
+
) -> frozenset[str]:
|
|
57
|
+
"""Return a stable set of parameters ignored by :mod:`orjson`."""
|
|
58
|
+
|
|
59
|
+
ignored: set[str] = set()
|
|
60
|
+
if params.ensure_ascii is not True:
|
|
61
|
+
ignored.add("ensure_ascii")
|
|
62
|
+
if params.separators != (",", ":"):
|
|
63
|
+
ignored.add("separators")
|
|
64
|
+
if params.cls is not None:
|
|
65
|
+
ignored.add("cls")
|
|
66
|
+
if extra_kwargs:
|
|
67
|
+
ignored.update(extra_kwargs.keys())
|
|
68
|
+
return frozenset(ignored)
|
|
69
|
+
|
|
70
|
+
|
|
71
|
+
def _json_dumps_orjson(
    orjson: Any,
    obj: Any,
    params: JsonDumpsParams,
    **kwargs: Any,
) -> bytes | str:
    """Serialize using :mod:`orjson` and warn about unsupported parameters.

    orjson only honours ``sort_keys`` (via ``OPT_SORT_KEYS``) and ``default``;
    everything else collected by :func:`_collect_ignored_params` is reported
    through the module-level once-only warning.
    """

    ignored = _collect_ignored_params(params, kwargs)
    if ignored:
        # Warn once per distinct combination (deduplication handled by WarnOnce).
        _warn_ignored_params_once(ignored, _format_ignored_params(ignored))

    option = orjson.OPT_SORT_KEYS if params.sort_keys else 0
    data = orjson.dumps(obj, option=option, default=params.default)
    # orjson returns bytes; decode unless the caller explicitly asked for bytes.
    return data if params.to_bytes else data.decode("utf-8")
|
|
86
|
+
|
|
87
|
+
|
|
88
|
+
def _json_dumps_std(
|
|
89
|
+
obj: Any,
|
|
90
|
+
params: JsonDumpsParams,
|
|
91
|
+
**kwargs: Any,
|
|
92
|
+
) -> bytes | str:
|
|
93
|
+
"""Serialize using the standard library :func:`json.dumps`."""
|
|
94
|
+
|
|
95
|
+
result = json.dumps(
|
|
96
|
+
obj,
|
|
97
|
+
sort_keys=params.sort_keys,
|
|
98
|
+
ensure_ascii=params.ensure_ascii,
|
|
99
|
+
separators=params.separators,
|
|
100
|
+
cls=params.cls,
|
|
101
|
+
default=params.default,
|
|
102
|
+
**kwargs,
|
|
103
|
+
)
|
|
104
|
+
return result if not params.to_bytes else result.encode("utf-8")
|
|
105
|
+
|
|
106
|
+
|
|
107
|
+
def json_dumps(
    obj: Any,
    *,
    sort_keys: bool = False,
    default: Callable[[Any], Any] | None = None,
    ensure_ascii: bool = True,
    separators: tuple[str, str] = (",", ":"),
    cls: type[json.JSONEncoder] | None = None,
    to_bytes: bool = False,
    **kwargs: Any,
) -> bytes | str:
    """Serialize ``obj`` to JSON using ``orjson`` when available.

    Falls back to :func:`json.dumps` when :mod:`orjson` cannot be imported.
    Keyword arguments are validated eagerly so bad input is rejected the same
    way regardless of the backend that ends up serialising.
    """

    # Eager validation — identical error behaviour for both backends.
    if not isinstance(sort_keys, bool):
        raise TypeError("sort_keys must be a boolean")
    if default is not None and not callable(default):
        raise TypeError("default must be callable when provided")
    if not isinstance(ensure_ascii, bool):
        raise TypeError("ensure_ascii must be a boolean")
    if (
        not isinstance(separators, tuple)
        or len(separators) != 2
        or not all(isinstance(part, str) for part in separators)
    ):
        raise TypeError("separators must be a tuple of two strings")
    if cls is not None and (
        not isinstance(cls, type) or not issubclass(cls, json.JSONEncoder)
    ):
        raise TypeError("cls must be a subclass of json.JSONEncoder")
    if not isinstance(to_bytes, bool):
        raise TypeError("to_bytes must be a boolean")

    # Reuse the shared immutable container when every argument is the default.
    uses_defaults = (
        not sort_keys
        and default is None
        and ensure_ascii
        and separators == (",", ":")
        and cls is None
        and not to_bytes
    )
    if uses_defaults:
        params = DEFAULT_PARAMS
    else:
        params = JsonDumpsParams(
            sort_keys=sort_keys,
            default=default,
            ensure_ascii=ensure_ascii,
            separators=separators,
            cls=cls,
            to_bytes=to_bytes,
        )

    orjson = cached_import("orjson", emit="log")
    if orjson is None:
        return _json_dumps_std(obj, params, **kwargs)
    return _json_dumps_orjson(orjson, obj, params, **kwargs)
|
tnfr/utils/io.pyi
ADDED
tnfr/utils/validators.py
ADDED
|
@@ -0,0 +1,130 @@
|
|
|
1
|
+
"""Validation utilities for TNFR graph structures."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import numbers
|
|
6
|
+
import sys
|
|
7
|
+
|
|
8
|
+
from collections.abc import Mapping
|
|
9
|
+
from typing import Callable, Sequence
|
|
10
|
+
|
|
11
|
+
from .._compat import TypeAlias
|
|
12
|
+
|
|
13
|
+
from ..alias import get_attr
|
|
14
|
+
from ..config.constants import GLYPHS_CANONICAL_SET
|
|
15
|
+
from ..constants import get_aliases, get_param
|
|
16
|
+
from ..helpers.numeric import within_range
|
|
17
|
+
from ..types import (
|
|
18
|
+
EPIValue,
|
|
19
|
+
NodeId,
|
|
20
|
+
StructuralFrequency,
|
|
21
|
+
TNFRGraph,
|
|
22
|
+
)
|
|
23
|
+
ALIAS_EPI = get_aliases("EPI")
|
|
24
|
+
ALIAS_VF = get_aliases("VF")
|
|
25
|
+
|
|
26
|
+
ValidatorFunc: TypeAlias = Callable[[TNFRGraph], None]
|
|
27
|
+
"""Callable signature expected for validation routines."""
|
|
28
|
+
|
|
29
|
+
NodeData = Mapping[str, object]
|
|
30
|
+
"""Read-only node attribute mapping used by validators."""
|
|
31
|
+
|
|
32
|
+
AliasSequence = Sequence[str]
|
|
33
|
+
"""Sequence of accepted attribute aliases."""
|
|
34
|
+
|
|
35
|
+
__all__ = ("validate_window", "run_validators")
|
|
36
|
+
|
|
37
|
+
|
|
38
|
+
def validate_window(window: int, *, positive: bool = False) -> int:
    """Validate ``window`` as an ``int`` and return it.

    Non-integer values raise :class:`TypeError`. When ``positive`` is ``True``
    the value must be strictly greater than zero; otherwise it may be zero.
    Negative values always raise :class:`ValueError`.
    """

    # ``bool`` is an Integral subtype, so reject it explicitly.
    is_integral = isinstance(window, numbers.Integral) and not isinstance(window, bool)
    if not is_integral:
        raise TypeError("'window' must be an integer")
    lower_bound = 1 if positive else 0
    if window < lower_bound:
        kind = "positive" if positive else "non-negative"
        raise ValueError(f"'window'={window} must be {kind}")
    # Normalise Integral subtypes (e.g. numpy ints) to a plain int.
    return int(window)
|
|
52
|
+
|
|
53
|
+
|
|
54
|
+
def _require_attr(data: NodeData, alias: AliasSequence, node: NodeId, name: str) -> float:
    """Return the aliased attribute of ``node`` as ``float``, raising when absent."""

    # ``get_attr`` expects a concrete dict; reuse ``data`` when it already is one.
    mapping = data if isinstance(data, dict) else dict(data)
    value = get_attr(mapping, alias, None)
    if value is None:
        raise ValueError(f"Missing {name} attribute in node {node}")
    return float(value)
|
|
66
|
+
|
|
67
|
+
|
|
68
|
+
def _validate_sigma(graph: TNFRGraph) -> None:
    """Raise :class:`ValueError` when the graph σ vector magnitude exceeds 1."""

    # Imported lazily to avoid a circular dependency with ``tnfr.sense``.
    from ..sense import sigma_vector_from_graph

    sv = sigma_vector_from_graph(graph)
    # One epsilon of slack so a magnitude of exactly 1.0 passes despite
    # floating-point rounding.
    if sv.get("mag", 0.0) > 1.0 + sys.float_info.epsilon:
        raise ValueError("σ norm exceeds 1")
|
|
74
|
+
|
|
75
|
+
|
|
76
|
+
GRAPH_VALIDATORS: tuple[ValidatorFunc, ...] = (_validate_sigma,)
|
|
77
|
+
"""Ordered collection of graph-level validators."""
|
|
78
|
+
|
|
79
|
+
|
|
80
|
+
def _check_epi_vf(
    epi: EPIValue,
    vf: StructuralFrequency,
    epi_min: float,
    epi_max: float,
    vf_min: float,
    vf_max: float,
    node: NodeId,
) -> None:
    """Validate that ``epi`` and ``vf`` lie within their configured bounds.

    Delegates to :func:`_check_range`, which raises :class:`ValueError` on
    the first out-of-range value.
    """
    _check_range(epi, epi_min, epi_max, "EPI", node)
    _check_range(vf, vf_min, vf_max, "VF", node)
|
|
91
|
+
|
|
92
|
+
|
|
93
|
+
def _out_of_range_msg(name: str, node: NodeId, val: float) -> str:
|
|
94
|
+
return f"{name} out of range in node {node}: {val}"
|
|
95
|
+
|
|
96
|
+
|
|
97
|
+
def _check_range(
    val: float,
    lower: float,
    upper: float,
    name: str,
    node: NodeId,
    tol: float = 1e-9,
) -> None:
    """Raise :class:`ValueError` when ``val`` is outside ``[lower, upper]``.

    ``tol`` widens the accepted interval to absorb floating-point noise.
    """

    if within_range(val, lower, upper, tol):
        return
    raise ValueError(_out_of_range_msg(name, node, val))
|
|
107
|
+
|
|
108
|
+
|
|
109
|
+
def _check_glyph(glyph: str | None, node: NodeId) -> None:
    """Reject glyphs outside the canonical alphabet.

    ``None`` and empty strings are treated as "no glyph" and pass silently.
    """

    if not glyph:
        return
    if glyph not in GLYPHS_CANONICAL_SET:
        raise KeyError(f"Invalid glyph {glyph} in node {node}")
|
|
112
|
+
|
|
113
|
+
|
|
114
|
+
def run_validators(graph: TNFRGraph) -> None:
    """Run all invariant validators on ``G`` with a single node pass.

    Raises the first violation encountered: :class:`ValueError` for missing
    or out-of-range EPI/VF values, :class:`KeyError` for invalid glyphs.
    """
    # Imported lazily to avoid a circular dependency with ``tnfr.glyph_history``.
    from ..glyph_history import last_glyph

    # Bounds are read once from graph-level parameters and shared by all nodes.
    epi_min = float(get_param(graph, "EPI_MIN"))
    epi_max = float(get_param(graph, "EPI_MAX"))
    vf_min = float(get_param(graph, "VF_MIN"))
    vf_max = float(get_param(graph, "VF_MAX"))

    # Single pass: range-check EPI/VF and validate the node's latest glyph.
    for node, data in graph.nodes(data=True):
        epi = EPIValue(_require_attr(data, ALIAS_EPI, node, "EPI"))
        vf = StructuralFrequency(_require_attr(data, ALIAS_VF, node, "VF"))
        _check_epi_vf(epi, vf, epi_min, epi_max, vf_min, vf_max, node)
        _check_glyph(last_glyph(data), node)

    # Graph-level validators (e.g. the σ-norm check) run after the node pass.
    for validator in GRAPH_VALIDATORS:
        validator(graph)
|
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
from collections.abc import Mapping
|
|
2
|
+
from typing import Callable, Sequence
|
|
3
|
+
|
|
4
|
+
from ..types import TNFRGraph
|
|
5
|
+
from .._compat import TypeAlias
|
|
6
|
+
|
|
7
|
+
ValidatorFunc: TypeAlias = Callable[[TNFRGraph], None]
|
|
8
|
+
NodeData: TypeAlias = Mapping[str, object]
|
|
9
|
+
AliasSequence: TypeAlias = Sequence[str]
|
|
10
|
+
|
|
11
|
+
__all__: tuple[str, ...]
|
|
12
|
+
|
|
13
|
+
def __getattr__(name: str) -> object: ...
|
|
14
|
+
|
|
15
|
+
def validate_window(window: int, *, positive: bool = ...) -> int: ...
|
|
16
|
+
|
|
17
|
+
def run_validators(graph: TNFRGraph) -> None: ...
|
|
18
|
+
|
|
19
|
+
GRAPH_VALIDATORS: tuple[ValidatorFunc, ...]
|
|
@@ -0,0 +1,25 @@
|
|
|
1
|
+
"""Validation helpers for TNFR sequences."""
|
|
2
|
+
|
|
3
|
+
from .compatibility import CANON_COMPAT, CANON_FALLBACK
|
|
4
|
+
from .grammar import (
|
|
5
|
+
GrammarContext,
|
|
6
|
+
apply_glyph_with_grammar,
|
|
7
|
+
enforce_canonical_grammar,
|
|
8
|
+
on_applied_glyph,
|
|
9
|
+
)
|
|
10
|
+
from .rules import coerce_glyph, glyph_fallback, normalized_dnfr, get_norm
|
|
11
|
+
from .syntax import validate_sequence
|
|
12
|
+
|
|
13
|
+
__all__ = (
|
|
14
|
+
"validate_sequence",
|
|
15
|
+
"GrammarContext",
|
|
16
|
+
"apply_glyph_with_grammar",
|
|
17
|
+
"enforce_canonical_grammar",
|
|
18
|
+
"on_applied_glyph",
|
|
19
|
+
"coerce_glyph",
|
|
20
|
+
"glyph_fallback",
|
|
21
|
+
"normalized_dnfr",
|
|
22
|
+
"get_norm",
|
|
23
|
+
"CANON_COMPAT",
|
|
24
|
+
"CANON_FALLBACK",
|
|
25
|
+
)
|
|
@@ -0,0 +1,17 @@
|
|
|
1
|
+
from typing import Any
|
|
2
|
+
|
|
3
|
+
__all__: Any
|
|
4
|
+
|
|
5
|
+
def __getattr__(name: str) -> Any: ...
|
|
6
|
+
|
|
7
|
+
CANON_COMPAT: Any
|
|
8
|
+
CANON_FALLBACK: Any
|
|
9
|
+
GrammarContext: Any
|
|
10
|
+
apply_glyph_with_grammar: Any
|
|
11
|
+
coerce_glyph: Any
|
|
12
|
+
enforce_canonical_grammar: Any
|
|
13
|
+
get_norm: Any
|
|
14
|
+
glyph_fallback: Any
|
|
15
|
+
normalized_dnfr: Any
|
|
16
|
+
on_applied_glyph: Any
|
|
17
|
+
validate_sequence: Any
|
|
@@ -0,0 +1,59 @@
|
|
|
1
|
+
"""Canonical TNFR compatibility tables.
|
|
2
|
+
|
|
3
|
+
This module centralises the canonical transitions that keep TNFR
|
|
4
|
+
coherence consistent across refactors. It is intentionally lightweight
|
|
5
|
+
so :mod:`tnfr.validation.grammar` can depend on it without introducing
|
|
6
|
+
cycles.
|
|
7
|
+
"""
|
|
8
|
+
|
|
9
|
+
from __future__ import annotations
|
|
10
|
+
|
|
11
|
+
from ..types import Glyph
|
|
12
|
+
|
|
13
|
+
__all__ = ["CANON_COMPAT", "CANON_FALLBACK"]
|
|
14
|
+
|
|
15
|
+
# Canonical compatibilities (allowed next glyphs)
# Keys are the current glyph; values are the set of glyphs allowed to follow.
CANON_COMPAT: dict[Glyph, set[Glyph]] = {
    # Opening / initiation
    Glyph.AL: {Glyph.EN, Glyph.RA, Glyph.NAV, Glyph.VAL, Glyph.UM},
    Glyph.EN: {Glyph.IL, Glyph.UM, Glyph.RA, Glyph.NAV},
    # Stabilisation / diffusion / coupling
    Glyph.IL: {Glyph.RA, Glyph.VAL, Glyph.UM, Glyph.SHA},
    Glyph.UM: {Glyph.RA, Glyph.IL, Glyph.VAL, Glyph.NAV},
    Glyph.RA: {Glyph.IL, Glyph.VAL, Glyph.UM, Glyph.NAV},
    Glyph.VAL: {Glyph.UM, Glyph.RA, Glyph.IL, Glyph.NAV},
    # Dissonance → transition → mutation
    Glyph.OZ: {Glyph.ZHIR, Glyph.NAV},
    Glyph.ZHIR: {Glyph.IL, Glyph.NAV},
    Glyph.NAV: {Glyph.OZ, Glyph.ZHIR, Glyph.RA, Glyph.IL, Glyph.UM},
    # Closures / latent states
    Glyph.SHA: {Glyph.AL, Glyph.EN},
    Glyph.NUL: {Glyph.AL, Glyph.IL},
    # Self-organising blocks
    Glyph.THOL: {
        Glyph.OZ,
        Glyph.ZHIR,
        Glyph.NAV,
        Glyph.RA,
        Glyph.IL,
        Glyph.UM,
        Glyph.SHA,
        Glyph.NUL,
    },
}

# Canonical fallbacks when a transition is not allowed
# Every glyph that appears as a key in CANON_COMPAT maps to exactly one
# safe substitute here.
CANON_FALLBACK: dict[Glyph, Glyph] = {
    Glyph.AL: Glyph.EN,
    Glyph.EN: Glyph.IL,
    Glyph.IL: Glyph.RA,
    Glyph.NAV: Glyph.RA,
    Glyph.NUL: Glyph.AL,
    Glyph.OZ: Glyph.ZHIR,
    Glyph.RA: Glyph.IL,
    Glyph.SHA: Glyph.AL,
    Glyph.THOL: Glyph.NAV,
    Glyph.UM: Glyph.RA,
    Glyph.VAL: Glyph.RA,
    Glyph.ZHIR: Glyph.IL,
}
|
|
@@ -0,0 +1,149 @@
|
|
|
1
|
+
"""Canonical grammar enforcement utilities."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from collections.abc import Iterable
|
|
6
|
+
from dataclasses import dataclass
|
|
7
|
+
from typing import Any, Optional, TYPE_CHECKING, cast
|
|
8
|
+
|
|
9
|
+
from ..constants import DEFAULTS, get_param
|
|
10
|
+
from ..operators import apply_glyph
|
|
11
|
+
from ..types import Glyph, NodeId, TNFRGraph
|
|
12
|
+
from .compatibility import CANON_COMPAT
|
|
13
|
+
from . import rules as _rules
|
|
14
|
+
|
|
15
|
+
if TYPE_CHECKING: # pragma: no cover - typing only
|
|
16
|
+
from ..node import NodeProtocol
|
|
17
|
+
|
|
18
|
+
__all__ = [
|
|
19
|
+
"GrammarContext",
|
|
20
|
+
"_gram_state",
|
|
21
|
+
"enforce_canonical_grammar",
|
|
22
|
+
"on_applied_glyph",
|
|
23
|
+
"apply_glyph_with_grammar",
|
|
24
|
+
]
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
@dataclass
class GrammarContext:
    """Shared context for grammar helpers.

    Collects graph-level settings to reduce positional parameters across
    helper functions.
    """

    G: TNFRGraph  # graph whose grammar is being enforced
    cfg_soft: dict[str, Any]  # soft-grammar settings (graph key "GRAMMAR")
    cfg_canon: dict[str, Any]  # canonical-grammar settings (graph key "GRAMMAR_CANON")
    norms: dict[str, Any]  # selector norms cached under "_sel_norms"; may be empty

    @classmethod
    def from_graph(cls, G: TNFRGraph) -> "GrammarContext":
        """Create a :class:`GrammarContext` for ``G``."""

        # Graph-level overrides win; package DEFAULTS are the fallback.
        return cls(
            G=G,
            cfg_soft=G.graph.get("GRAMMAR", DEFAULTS.get("GRAMMAR", {})),
            cfg_canon=G.graph.get(
                "GRAMMAR_CANON", DEFAULTS.get("GRAMMAR_CANON", {})
            ),
            norms=G.graph.get("_sel_norms") or {},
        )
|
|
52
|
+
|
|
53
|
+
|
|
54
|
+
# -------------------------
|
|
55
|
+
# Per-node grammar state
|
|
56
|
+
# -------------------------
|
|
57
|
+
|
|
58
|
+
def _gram_state(nd: dict[str, Any]) -> dict[str, Any]:
|
|
59
|
+
"""Create or return the node grammar state."""
|
|
60
|
+
|
|
61
|
+
return nd.setdefault("_GRAM", {"thol_open": False, "thol_len": 0})
|
|
62
|
+
|
|
63
|
+
|
|
64
|
+
# -------------------------
|
|
65
|
+
# Core: enforce grammar on a candidate
|
|
66
|
+
# -------------------------
|
|
67
|
+
|
|
68
|
+
def enforce_canonical_grammar(
    G: TNFRGraph,
    n: NodeId,
    cand: Glyph | str,
    ctx: Optional[GrammarContext] = None,
) -> Glyph | str:
    """Validate and adjust a candidate glyph according to canonical grammar.

    Returns the (possibly substituted) glyph.  The return type mirrors the
    input: a plain-string candidate yields a string, a ``Glyph`` yields a
    ``Glyph``.  Glyphs outside the canonical alphabet pass through untouched.
    """

    if ctx is None:
        ctx = GrammarContext.from_graph(G)

    nd = ctx.G.nodes[n]
    st = _gram_state(nd)

    raw_cand = cand
    cand = _rules.coerce_glyph(cand)
    input_was_str = isinstance(raw_cand, str)

    # 0) If glyphs outside the alphabet arrive, leave untouched
    if not isinstance(cand, Glyph) or cand not in CANON_COMPAT:
        return raw_cand if input_was_str else cand

    # Rules are applied in a fixed order: repeat damping, forced
    # transitions (ΔNFR norm, acceleration norm), OZ→ZHIR, THOL closure,
    # and finally pairwise compatibility.
    original = cand
    cand = _rules._check_repeats(ctx, n, cand)

    cand = _rules._maybe_force(ctx, n, cand, original, _rules.normalized_dnfr, "force_dnfr")
    cand = _rules._maybe_force(ctx, n, cand, original, _rules._accel_norm, "force_accel")
    cand = _rules._check_oz_to_zhir(ctx, n, cand)
    cand = _rules._check_thol_closure(ctx, n, cand, st)
    cand = _rules._check_compatibility(ctx, n, cand)

    # Preserve the caller's value kind: string in → string out.
    coerced_final = _rules.coerce_glyph(cand)
    if input_was_str:
        if isinstance(coerced_final, Glyph):
            return coerced_final.value
        return str(cand)
    return coerced_final if isinstance(coerced_final, Glyph) else cand
|
|
105
|
+
|
|
106
|
+
|
|
107
|
+
# -------------------------
|
|
108
|
+
# Post-selection: update grammar state
|
|
109
|
+
# -------------------------
|
|
110
|
+
|
|
111
|
+
def on_applied_glyph(G: TNFRGraph, n: NodeId, applied: Glyph | str) -> None:
    """Update the per-node grammar state after ``applied`` landed on ``n``."""
    nd = G.nodes[n]
    st = _gram_state(nd)
    try:
        glyph = applied if isinstance(applied, Glyph) else Glyph(str(applied))
    except ValueError:
        # Non-canonical glyphs leave the THOL bookkeeping untouched.
        glyph = None

    if glyph is Glyph.THOL:
        # THOL opens a self-organising block; restart its length counter.
        st["thol_open"] = True
        st["thol_len"] = 0
    elif glyph in (Glyph.SHA, Glyph.NUL):
        # SHA/NUL close any open block and reset the counter.
        st["thol_open"] = False
        st["thol_len"] = 0
|
|
125
|
+
|
|
126
|
+
|
|
127
|
+
# -------------------------
|
|
128
|
+
# Direct application with canonical grammar
|
|
129
|
+
# -------------------------
|
|
130
|
+
|
|
131
|
+
def apply_glyph_with_grammar(
    G: TNFRGraph,
    nodes: Optional[Iterable[NodeId | "NodeProtocol"]],
    glyph: Glyph | str,
    window: Optional[int] = None,
) -> None:
    """Apply ``glyph`` to ``nodes`` enforcing the canonical grammar.

    ``nodes`` may mix bare node ids with node-like objects exposing ``.n``;
    ``None`` targets every node in ``G``.  ``window`` defaults to the graph's
    ``GLYPH_HYSTERESIS_WINDOW`` parameter.
    """

    if window is None:
        window = get_param(G, "GLYPH_HYSTERESIS_WINDOW")

    g_str = glyph.value if isinstance(glyph, Glyph) else str(glyph)
    iter_nodes = G.nodes() if nodes is None else nodes
    # Build the context once so graph config is not re-read per node.
    ctx = GrammarContext.from_graph(G)
    for node_ref in iter_nodes:
        # Accept NodeProtocol objects (via ``.n``) as well as raw node ids.
        node_id = cast(NodeId, getattr(node_ref, "n", node_ref))
        g_eff = enforce_canonical_grammar(G, node_id, g_str, ctx)
        apply_glyph(G, node_id, g_eff, window=window)
        on_applied_glyph(G, node_id, g_eff)
|