tnfr 4.5.2__py3-none-any.whl → 7.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of tnfr might be problematic. Click here for more details.
- tnfr/__init__.py +275 -51
- tnfr/__init__.pyi +33 -0
- tnfr/_compat.py +10 -0
- tnfr/_generated_version.py +34 -0
- tnfr/_version.py +49 -0
- tnfr/_version.pyi +7 -0
- tnfr/alias.py +117 -31
- tnfr/alias.pyi +108 -0
- tnfr/cache.py +6 -572
- tnfr/cache.pyi +16 -0
- tnfr/callback_utils.py +16 -38
- tnfr/callback_utils.pyi +79 -0
- tnfr/cli/__init__.py +34 -14
- tnfr/cli/__init__.pyi +26 -0
- tnfr/cli/arguments.py +211 -28
- tnfr/cli/arguments.pyi +27 -0
- tnfr/cli/execution.py +470 -50
- tnfr/cli/execution.pyi +70 -0
- tnfr/cli/utils.py +18 -3
- tnfr/cli/utils.pyi +8 -0
- tnfr/config/__init__.py +13 -0
- tnfr/config/__init__.pyi +10 -0
- tnfr/{constants_glyphs.py → config/constants.py} +26 -20
- tnfr/config/constants.pyi +12 -0
- tnfr/config/feature_flags.py +83 -0
- tnfr/{config.py → config/init.py} +11 -7
- tnfr/config/init.pyi +8 -0
- tnfr/config/operator_names.py +93 -0
- tnfr/config/operator_names.pyi +28 -0
- tnfr/config/presets.py +84 -0
- tnfr/config/presets.pyi +7 -0
- tnfr/constants/__init__.py +80 -29
- tnfr/constants/__init__.pyi +92 -0
- tnfr/constants/aliases.py +31 -0
- tnfr/constants/core.py +4 -4
- tnfr/constants/core.pyi +17 -0
- tnfr/constants/init.py +1 -1
- tnfr/constants/init.pyi +12 -0
- tnfr/constants/metric.py +7 -15
- tnfr/constants/metric.pyi +19 -0
- tnfr/dynamics/__init__.py +165 -633
- tnfr/dynamics/__init__.pyi +82 -0
- tnfr/dynamics/adaptation.py +267 -0
- tnfr/dynamics/aliases.py +23 -0
- tnfr/dynamics/coordination.py +385 -0
- tnfr/dynamics/dnfr.py +2283 -400
- tnfr/dynamics/dnfr.pyi +24 -0
- tnfr/dynamics/integrators.py +406 -98
- tnfr/dynamics/integrators.pyi +34 -0
- tnfr/dynamics/runtime.py +881 -0
- tnfr/dynamics/sampling.py +10 -5
- tnfr/dynamics/sampling.pyi +7 -0
- tnfr/dynamics/selectors.py +719 -0
- tnfr/execution.py +70 -48
- tnfr/execution.pyi +45 -0
- tnfr/flatten.py +13 -9
- tnfr/flatten.pyi +21 -0
- tnfr/gamma.py +66 -53
- tnfr/gamma.pyi +34 -0
- tnfr/glyph_history.py +110 -52
- tnfr/glyph_history.pyi +35 -0
- tnfr/glyph_runtime.py +16 -0
- tnfr/glyph_runtime.pyi +9 -0
- tnfr/immutable.py +69 -28
- tnfr/immutable.pyi +34 -0
- tnfr/initialization.py +16 -16
- tnfr/initialization.pyi +65 -0
- tnfr/io.py +6 -240
- tnfr/io.pyi +16 -0
- tnfr/locking.pyi +7 -0
- tnfr/mathematics/__init__.py +81 -0
- tnfr/mathematics/backend.py +426 -0
- tnfr/mathematics/dynamics.py +398 -0
- tnfr/mathematics/epi.py +254 -0
- tnfr/mathematics/generators.py +222 -0
- tnfr/mathematics/metrics.py +119 -0
- tnfr/mathematics/operators.py +233 -0
- tnfr/mathematics/operators_factory.py +71 -0
- tnfr/mathematics/projection.py +78 -0
- tnfr/mathematics/runtime.py +173 -0
- tnfr/mathematics/spaces.py +247 -0
- tnfr/mathematics/transforms.py +292 -0
- tnfr/metrics/__init__.py +10 -10
- tnfr/metrics/__init__.pyi +20 -0
- tnfr/metrics/coherence.py +993 -324
- tnfr/metrics/common.py +23 -16
- tnfr/metrics/common.pyi +46 -0
- tnfr/metrics/core.py +251 -35
- tnfr/metrics/core.pyi +13 -0
- tnfr/metrics/diagnosis.py +708 -111
- tnfr/metrics/diagnosis.pyi +85 -0
- tnfr/metrics/export.py +27 -15
- tnfr/metrics/glyph_timing.py +232 -42
- tnfr/metrics/reporting.py +33 -22
- tnfr/metrics/reporting.pyi +12 -0
- tnfr/metrics/sense_index.py +987 -43
- tnfr/metrics/sense_index.pyi +9 -0
- tnfr/metrics/trig.py +214 -23
- tnfr/metrics/trig.pyi +13 -0
- tnfr/metrics/trig_cache.py +115 -22
- tnfr/metrics/trig_cache.pyi +10 -0
- tnfr/node.py +542 -136
- tnfr/node.pyi +178 -0
- tnfr/observers.py +152 -35
- tnfr/observers.pyi +31 -0
- tnfr/ontosim.py +23 -19
- tnfr/ontosim.pyi +28 -0
- tnfr/operators/__init__.py +601 -82
- tnfr/operators/__init__.pyi +45 -0
- tnfr/operators/definitions.py +513 -0
- tnfr/operators/definitions.pyi +78 -0
- tnfr/operators/grammar.py +760 -0
- tnfr/operators/jitter.py +107 -38
- tnfr/operators/jitter.pyi +11 -0
- tnfr/operators/registry.py +75 -0
- tnfr/operators/registry.pyi +13 -0
- tnfr/operators/remesh.py +149 -88
- tnfr/py.typed +0 -0
- tnfr/rng.py +46 -143
- tnfr/rng.pyi +14 -0
- tnfr/schemas/__init__.py +8 -0
- tnfr/schemas/grammar.json +94 -0
- tnfr/selector.py +25 -19
- tnfr/selector.pyi +19 -0
- tnfr/sense.py +72 -62
- tnfr/sense.pyi +23 -0
- tnfr/structural.py +522 -262
- tnfr/structural.pyi +69 -0
- tnfr/telemetry/__init__.py +35 -0
- tnfr/telemetry/cache_metrics.py +226 -0
- tnfr/telemetry/nu_f.py +423 -0
- tnfr/telemetry/nu_f.pyi +123 -0
- tnfr/telemetry/verbosity.py +37 -0
- tnfr/tokens.py +1 -3
- tnfr/tokens.pyi +36 -0
- tnfr/trace.py +270 -113
- tnfr/trace.pyi +40 -0
- tnfr/types.py +574 -6
- tnfr/types.pyi +331 -0
- tnfr/units.py +69 -0
- tnfr/units.pyi +16 -0
- tnfr/utils/__init__.py +217 -0
- tnfr/utils/__init__.pyi +202 -0
- tnfr/utils/cache.py +2395 -0
- tnfr/utils/cache.pyi +468 -0
- tnfr/utils/chunks.py +104 -0
- tnfr/utils/chunks.pyi +21 -0
- tnfr/{collections_utils.py → utils/data.py} +147 -90
- tnfr/utils/data.pyi +64 -0
- tnfr/utils/graph.py +85 -0
- tnfr/utils/graph.pyi +10 -0
- tnfr/utils/init.py +770 -0
- tnfr/utils/init.pyi +78 -0
- tnfr/utils/io.py +456 -0
- tnfr/{helpers → utils}/numeric.py +51 -24
- tnfr/utils/numeric.pyi +21 -0
- tnfr/validation/__init__.py +113 -0
- tnfr/validation/__init__.pyi +77 -0
- tnfr/validation/compatibility.py +95 -0
- tnfr/validation/compatibility.pyi +6 -0
- tnfr/validation/grammar.py +71 -0
- tnfr/validation/grammar.pyi +40 -0
- tnfr/validation/graph.py +138 -0
- tnfr/validation/graph.pyi +17 -0
- tnfr/validation/rules.py +281 -0
- tnfr/validation/rules.pyi +55 -0
- tnfr/validation/runtime.py +263 -0
- tnfr/validation/runtime.pyi +31 -0
- tnfr/validation/soft_filters.py +170 -0
- tnfr/validation/soft_filters.pyi +37 -0
- tnfr/validation/spectral.py +159 -0
- tnfr/validation/spectral.pyi +46 -0
- tnfr/validation/syntax.py +40 -0
- tnfr/validation/syntax.pyi +10 -0
- tnfr/validation/window.py +39 -0
- tnfr/validation/window.pyi +1 -0
- tnfr/viz/__init__.py +9 -0
- tnfr/viz/matplotlib.py +246 -0
- tnfr-7.0.0.dist-info/METADATA +179 -0
- tnfr-7.0.0.dist-info/RECORD +185 -0
- {tnfr-4.5.2.dist-info → tnfr-7.0.0.dist-info}/licenses/LICENSE.md +1 -1
- tnfr/grammar.py +0 -344
- tnfr/graph_utils.py +0 -84
- tnfr/helpers/__init__.py +0 -71
- tnfr/import_utils.py +0 -228
- tnfr/json_utils.py +0 -162
- tnfr/logging_utils.py +0 -116
- tnfr/presets.py +0 -60
- tnfr/validators.py +0 -84
- tnfr/value_utils.py +0 -59
- tnfr-4.5.2.dist-info/METADATA +0 -379
- tnfr-4.5.2.dist-info/RECORD +0 -67
- {tnfr-4.5.2.dist-info → tnfr-7.0.0.dist-info}/WHEEL +0 -0
- {tnfr-4.5.2.dist-info → tnfr-7.0.0.dist-info}/entry_points.txt +0 -0
- {tnfr-4.5.2.dist-info → tnfr-7.0.0.dist-info}/top_level.txt +0 -0
tnfr/utils/init.pyi
ADDED
|
@@ -0,0 +1,78 @@
|
|
|
1
|
+
import logging
|
|
2
|
+
from collections.abc import Callable, Iterable, Iterator, Mapping
|
|
3
|
+
from typing import Any, Hashable, Literal
|
|
4
|
+
|
|
5
|
+
__all__: tuple[str, ...]
|
|
6
|
+
|
|
7
|
+
def __getattr__(name: str) -> Any: ...
|
|
8
|
+
|
|
9
|
+
class WarnOnce:
|
|
10
|
+
def __init__(
|
|
11
|
+
self, logger: logging.Logger, msg: str, *, maxsize: int = ...
|
|
12
|
+
) -> None: ...
|
|
13
|
+
def __call__(
|
|
14
|
+
self,
|
|
15
|
+
data: Mapping[Hashable, Any] | Hashable,
|
|
16
|
+
value: Any | None = ...,
|
|
17
|
+
) -> None: ...
|
|
18
|
+
def clear(self) -> None: ...
|
|
19
|
+
|
|
20
|
+
class LazyImportProxy:
|
|
21
|
+
def __init__(
|
|
22
|
+
self,
|
|
23
|
+
module_name: str,
|
|
24
|
+
attr: str | None,
|
|
25
|
+
emit: Literal["warn", "log", "both"],
|
|
26
|
+
fallback: Any | None,
|
|
27
|
+
) -> None: ...
|
|
28
|
+
def __getattr__(self, name: str) -> Any: ...
|
|
29
|
+
def __call__(self, *args: Any, **kwargs: Any) -> Any: ...
|
|
30
|
+
def __bool__(self) -> bool: ...
|
|
31
|
+
def __iter__(self) -> Iterator[Any]: ...
|
|
32
|
+
def resolve(self) -> Any: ...
|
|
33
|
+
|
|
34
|
+
class ImportRegistry:
|
|
35
|
+
limit: int
|
|
36
|
+
failed: Mapping[str, None]
|
|
37
|
+
warned: set[str]
|
|
38
|
+
lock: Any
|
|
39
|
+
|
|
40
|
+
def record_failure(self, key: str, *, module: str | None = ...) -> None: ...
|
|
41
|
+
def discard(self, key: str) -> None: ...
|
|
42
|
+
def mark_warning(self, module: str) -> bool: ...
|
|
43
|
+
def clear(self) -> None: ...
|
|
44
|
+
def __contains__(self, key: str) -> bool: ...
|
|
45
|
+
|
|
46
|
+
EMIT_MAP: Mapping[str, Callable[[str], None]]
|
|
47
|
+
IMPORT_LOG: ImportRegistry
|
|
48
|
+
_IMPORT_STATE: ImportRegistry
|
|
49
|
+
_LOGGING_CONFIGURED: bool
|
|
50
|
+
_DEFAULT_CACHE_SIZE: int
|
|
51
|
+
_FAILED_IMPORT_LIMIT: int
|
|
52
|
+
|
|
53
|
+
def _configure_root() -> None: ...
|
|
54
|
+
def _reset_import_state() -> None: ...
|
|
55
|
+
def _reset_logging_state() -> None: ...
|
|
56
|
+
def _warn_failure(module: str, attr: str | None, err: Exception) -> None: ...
|
|
57
|
+
def cached_import(
|
|
58
|
+
module_name: str,
|
|
59
|
+
attr: str | None = ...,
|
|
60
|
+
*,
|
|
61
|
+
fallback: Any | None = ...,
|
|
62
|
+
emit: Literal["warn", "log", "both"] = ...,
|
|
63
|
+
lazy: bool = ...,
|
|
64
|
+
) -> Any | None: ...
|
|
65
|
+
def warm_cached_import(
|
|
66
|
+
module: str | tuple[str, str | None] | Iterable[str | tuple[str, str | None]],
|
|
67
|
+
*extra: str | tuple[str, str | None],
|
|
68
|
+
attr: str | None = ...,
|
|
69
|
+
fallback: Any | None = ...,
|
|
70
|
+
emit: Literal["warn", "log", "both"] = ...,
|
|
71
|
+
lazy: bool = ...,
|
|
72
|
+
resolve: bool = ...,
|
|
73
|
+
) -> Any | dict[str, Any | None]: ...
|
|
74
|
+
def get_logger(name: str) -> logging.Logger: ...
|
|
75
|
+
def get_numpy() -> Any | None: ...
|
|
76
|
+
def get_nodenx() -> Any | None: ...
|
|
77
|
+
def prune_failed_imports(*modules: str) -> None: ...
|
|
78
|
+
def warn_once(logger: logging.Logger, msg: str, *, maxsize: int = ...) -> WarnOnce: ...
|
tnfr/utils/io.py
ADDED
|
@@ -0,0 +1,456 @@
|
|
|
1
|
+
"""Structured file and JSON utilities shared across the TNFR engine."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import json
|
|
6
|
+
import os
|
|
7
|
+
import tempfile
|
|
8
|
+
from dataclasses import dataclass
|
|
9
|
+
from functools import partial
|
|
10
|
+
from pathlib import Path
|
|
11
|
+
from typing import Any, Callable
|
|
12
|
+
|
|
13
|
+
from .init import LazyImportProxy, cached_import, get_logger, warn_once
|
|
14
|
+
|
|
15
|
+
# Module-level logger shared by all structured-I/O helpers in this file.
logger = get_logger(__name__)

# %s receives the formatted set of ignored parameter names (see
# _format_ignored_params).
_ORJSON_PARAMS_MSG = "'ensure_ascii', 'separators', 'cls' and extra kwargs are ignored when using orjson: %s"

# Deduplicates the orjson warning per ignored-parameter combination.
_warn_ignored_params_once = warn_once(logger, _ORJSON_PARAMS_MSG)
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
def clear_orjson_param_warnings() -> None:
    """Forget previously emitted warnings about ignored :mod:`orjson` parameters.

    After this call, the next serialization that drops unsupported
    parameters will warn again instead of being deduplicated.
    """
    _warn_ignored_params_once.clear()
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
def _format_ignored_params(combo: frozenset[str]) -> str:
    """Render *combo* as a deterministic, set-like string for log messages."""

    # Sort so the same combination always formats identically (needed for
    # the warn-once deduplication key to be stable).
    body = ", ".join(repr(name) for name in sorted(combo))
    return f"{{{body}}}"
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
@dataclass(frozen=True)
class JsonDumpsParams:
    """Container describing the parameters used by :func:`json_dumps`.

    Frozen so the shared ``DEFAULT_PARAMS`` instance below can be reused
    safely across calls; each field mirrors a keyword argument of
    :func:`json_dumps`.
    """

    sort_keys: bool = False  # sort object keys for deterministic output
    default: Callable[[Any], Any] | None = None  # fallback serializer hook
    ensure_ascii: bool = True  # escape non-ASCII (std-lib backend only)
    separators: tuple[str, str] = (",", ":")  # compact separators by default
    cls: type[json.JSONEncoder] | None = None  # custom encoder (std-lib only)
    to_bytes: bool = False  # return bytes instead of str


# Shared instance used on the fast path when every argument is a default.
DEFAULT_PARAMS = JsonDumpsParams()
|
|
47
|
+
|
|
48
|
+
|
|
49
|
+
def _collect_ignored_params(
    params: JsonDumpsParams, extra_kwargs: dict[str, Any]
) -> frozenset[str]:
    """Return the set of arguments that the orjson backend cannot honour."""

    # Every extra keyword is unsupported by orjson, as is any non-default
    # value of the std-lib-only options checked below.
    dropped: set[str] = set(extra_kwargs)
    if params.ensure_ascii is not True:
        dropped.add("ensure_ascii")
    if params.separators != (",", ":"):
        dropped.add("separators")
    if params.cls is not None:
        dropped.add("cls")
    return frozenset(dropped)
|
|
64
|
+
|
|
65
|
+
|
|
66
|
+
def _json_dumps_orjson(
    orjson: Any,
    obj: Any,
    params: JsonDumpsParams,
    **kwargs: Any,
) -> bytes | str:
    """Serialize via :mod:`orjson`, warning once about unsupported options."""

    dropped = _collect_ignored_params(params, kwargs)
    if dropped:
        # Warn-once keyed on the exact combination of ignored names.
        _warn_ignored_params_once(dropped, _format_ignored_params(dropped))

    flags = orjson.OPT_SORT_KEYS if params.sort_keys else 0
    payload = orjson.dumps(obj, option=flags, default=params.default)
    if params.to_bytes:
        return payload
    return payload.decode("utf-8")
|
|
81
|
+
|
|
82
|
+
|
|
83
|
+
def _json_dumps_std(
    obj: Any,
    params: JsonDumpsParams,
    **kwargs: Any,
) -> bytes | str:
    """Serialize with the standard-library :func:`json.dumps`."""

    text = json.dumps(
        obj,
        sort_keys=params.sort_keys,
        ensure_ascii=params.ensure_ascii,
        separators=params.separators,
        cls=params.cls,
        default=params.default,
        **kwargs,
    )
    if params.to_bytes:
        return text.encode("utf-8")
    return text
|
|
100
|
+
|
|
101
|
+
|
|
102
|
+
def json_dumps(
    obj: Any,
    *,
    sort_keys: bool = False,
    default: Callable[[Any], Any] | None = None,
    ensure_ascii: bool = True,
    separators: tuple[str, str] = (",", ":"),
    cls: type[json.JSONEncoder] | None = None,
    to_bytes: bool = False,
    **kwargs: Any,
) -> bytes | str:
    """Serialize ``obj`` to JSON using ``orjson`` when available.

    Falls back to :func:`json.dumps` when :mod:`orjson` is not installed.
    ``ensure_ascii``, ``separators``, ``cls`` and extra keyword arguments
    only affect the standard-library backend; the orjson path warns once
    when they are supplied with non-default values.
    """

    # Validate eagerly so both backends reject bad arguments identically.
    if not isinstance(sort_keys, bool):
        raise TypeError("sort_keys must be a boolean")
    if default is not None and not callable(default):
        raise TypeError("default must be callable when provided")
    if not isinstance(ensure_ascii, bool):
        raise TypeError("ensure_ascii must be a boolean")
    separators_ok = (
        isinstance(separators, tuple)
        and len(separators) == 2
        and all(isinstance(part, str) for part in separators)
    )
    if not separators_ok:
        raise TypeError("separators must be a tuple of two strings")
    if cls is not None and (
        not isinstance(cls, type) or not issubclass(cls, json.JSONEncoder)
    ):
        raise TypeError("cls must be a subclass of json.JSONEncoder")
    if not isinstance(to_bytes, bool):
        raise TypeError("to_bytes must be a boolean")

    # Reuse the shared frozen instance on the common all-defaults path to
    # avoid allocating a new JsonDumpsParams per call.
    uses_defaults = (
        sort_keys is False
        and default is None
        and ensure_ascii is True
        and separators == (",", ":")
        and cls is None
        and to_bytes is False
    )
    if uses_defaults:
        params = DEFAULT_PARAMS
    else:
        params = JsonDumpsParams(
            sort_keys=sort_keys,
            default=default,
            ensure_ascii=ensure_ascii,
            separators=separators,
            cls=cls,
            to_bytes=to_bytes,
        )

    orjson = cached_import("orjson", emit="log")
    if orjson is None:
        return _json_dumps_std(obj, params, **kwargs)
    return _json_dumps_orjson(orjson, obj, params, **kwargs)
|
|
153
|
+
|
|
154
|
+
|
|
155
|
+
def _raise_import_error(name: str, *_: Any, **__: Any) -> Any:
    # Placeholder callable installed as a lazy-import fallback: it accepts
    # any call signature and fails with a clear error naming the missing
    # dependency only when actually invoked.
    raise ImportError(f"{name} is not installed")
|
|
157
|
+
|
|
158
|
+
|
|
159
|
+
# Sentinel exception classes used when the optional TOML/YAML parsers are
# not installed, so except-clauses can always reference a real type.
_MISSING_TOML_ERROR = type(
    "MissingTOMLDependencyError",
    (Exception,),
    {"__doc__": "Fallback error used when tomllib/tomli is missing."},
)

_MISSING_YAML_ERROR = type(
    "MissingPyYAMLDependencyError",
    (Exception,),
    {"__doc__": "Fallback error used when pyyaml is missing."},
)


def _resolve_lazy(value: Any) -> Any:
    """Force a :class:`LazyImportProxy` to its real object; pass through otherwise."""
    if isinstance(value, LazyImportProxy):
        return value.resolve()
    return value


class _LazyBool:
    """Truthiness wrapper that resolves a lazy import only when tested."""

    __slots__ = ("_value",)

    def __init__(self, value: Any) -> None:
        # value may be a LazyImportProxy or any already-resolved object.
        self._value = value

    def __bool__(self) -> bool:
        # True iff the wrapped (possibly lazy) import actually resolved.
        return _resolve_lazy(self._value) is not None


# Prefer the std-lib ``tomllib`` (3.11+) with the ``tomli`` backport as
# fallback; all imports stay lazy so importing this module is cheap.
_TOMLI_MODULE = cached_import("tomli", emit="log", lazy=True)
tomllib = cached_import(
    "tomllib",
    emit="log",
    lazy=True,
    fallback=_TOMLI_MODULE,
)
# Truthy only once either tomllib or tomli resolves successfully.
has_toml = _LazyBool(tomllib)


_TOMLI_TOML_ERROR = cached_import(
    "tomli",
    "TOMLDecodeError",
    emit="log",
    lazy=True,
    fallback=_MISSING_TOML_ERROR,
)
TOMLDecodeError = cached_import(
    "tomllib",
    "TOMLDecodeError",
    emit="log",
    lazy=True,
    fallback=_TOMLI_TOML_ERROR,
)


# ``loads`` resolution mirrors the module resolution above; if neither
# implementation exists the partial raises ImportError on first use.
_TOMLI_LOADS = cached_import(
    "tomli",
    "loads",
    emit="log",
    lazy=True,
    fallback=partial(_raise_import_error, "tomllib/tomli"),
)
_TOML_LOADS: Callable[[str], Any] = cached_import(
    "tomllib",
    "loads",
    emit="log",
    lazy=True,
    fallback=_TOMLI_LOADS,
)


# Optional YAML support via pyyaml, same lazy pattern as TOML above.
yaml = cached_import("yaml", emit="log", lazy=True)


YAMLError = cached_import(
    "yaml",
    "YAMLError",
    emit="log",
    lazy=True,
    fallback=_MISSING_YAML_ERROR,
)


_YAML_SAFE_LOAD: Callable[[str], Any] = cached_import(
    "yaml",
    "safe_load",
    emit="log",
    lazy=True,
    fallback=partial(_raise_import_error, "pyyaml"),
)


def _parse_yaml(text: str) -> Any:
    """Parse YAML ``text`` using ``safe_load`` if available."""

    return _YAML_SAFE_LOAD(text)


def _parse_toml(text: str) -> Any:
    """Parse TOML ``text`` using ``tomllib`` or ``tomli``."""

    return _TOML_LOADS(text)


# Suffix -> parser dispatch table used by read_structured_file().
PARSERS = {
    ".json": json.loads,
    ".yaml": _parse_yaml,
    ".yml": _parse_yaml,
    ".toml": _parse_toml,
}
|
|
269
|
+
|
|
270
|
+
|
|
271
|
+
def _get_parser(suffix: str) -> Callable[[str], Any]:
    """Look up the parser registered for ``suffix`` (e.g. ``".json"``)."""
    try:
        return PARSERS[suffix]
    except KeyError as exc:
        # Translate to ValueError so callers see a uniform "bad input"
        # error, while keeping the KeyError as the chained cause.
        raise ValueError(f"Unsupported suffix: {suffix}") from exc
|
|
276
|
+
|
|
277
|
+
|
|
278
|
+
# Messages for failure categories that are always importable; entries are
# matched with isinstance() in declaration order.
_BASE_ERROR_MESSAGES: dict[type[BaseException], str] = {
    OSError: "Could not read {path}: {e}",
    UnicodeDecodeError: "Encoding error while reading {path}: {e}",
    json.JSONDecodeError: "Error parsing JSON file at {path}: {e}",
    ImportError: "Missing dependency parsing {path}: {e}",
}


def _resolve_exception_type(candidate: Any) -> type[BaseException] | None:
    """Return ``candidate`` as an exception class, or ``None``.

    ``candidate`` may be a lazy import proxy; it is resolved first and
    accepted only if it turns out to be a ``BaseException`` subclass.
    """
    resolved = _resolve_lazy(candidate)
    if isinstance(resolved, type) and issubclass(resolved, BaseException):
        return resolved
    return None


# Lazily-resolved (resolver, message) pairs for the optional YAML and TOML
# dependencies; resolvers run at error time, not import time, so a missing
# optional package never breaks module import.
_OPTIONAL_ERROR_MESSAGE_FACTORIES: tuple[
    tuple[Callable[[], type[BaseException] | None], str],
    ...,
] = (
    (
        lambda: _resolve_exception_type(YAMLError),
        "Error parsing YAML file at {path}: {e}",
    ),
    (
        lambda: _resolve_exception_type(TOMLDecodeError),
        "Error parsing TOML file at {path}: {e}",
    ),
)


# Always-available exception types wrapped by StructuredFileError.
_BASE_STRUCTURED_EXCEPTIONS = (
    OSError,
    UnicodeDecodeError,
    json.JSONDecodeError,
    ImportError,
)
|
|
314
|
+
|
|
315
|
+
|
|
316
|
+
def _iter_optional_exceptions() -> list[type[BaseException]]:
    """Collect the optional-dependency exception types that actually resolved."""

    candidates = (resolver() for resolver, _ in _OPTIONAL_ERROR_MESSAGE_FACTORIES)
    return [exc_type for exc_type in candidates if exc_type is not None]
|
|
323
|
+
|
|
324
|
+
|
|
325
|
+
def _is_structured_error(exc: Exception) -> bool:
    """Return whether *exc* belongs to the set wrapped by StructuredFileError."""

    if isinstance(exc, _BASE_STRUCTURED_EXCEPTIONS):
        return True
    # Optional YAML/TOML errors are resolved lazily, per call.
    return any(
        isinstance(exc, optional_exc)
        for optional_exc in _iter_optional_exceptions()
    )
|
|
332
|
+
|
|
333
|
+
|
|
334
|
+
def _format_structured_file_error(path: Path, e: Exception) -> str:
    """Build a human-readable message describing why *path* failed to load."""

    # Always-available categories first, in table order.
    for exc_type, template in _BASE_ERROR_MESSAGES.items():
        if isinstance(e, exc_type):
            return template.format(path=path, e=e)

    # Then the optionally-installed parser errors (YAML/TOML).
    for resolver, template in _OPTIONAL_ERROR_MESSAGE_FACTORIES:
        resolved = resolver()
        if resolved is not None and isinstance(e, resolved):
            return template.format(path=path, e=e)

    return f"Error parsing {path}: {e}"
|
|
345
|
+
|
|
346
|
+
|
|
347
|
+
class StructuredFileError(Exception):
    """Error while reading or parsing a structured file."""

    def __init__(self, path: Path, original: Exception) -> None:
        # Message is derived from the original exception's category so the
        # user sees what failed: I/O, encoding, or format-specific parsing.
        super().__init__(_format_structured_file_error(path, original))
        # Offending file path, kept for programmatic access by callers.
        self.path = path
|
|
353
|
+
|
|
354
|
+
|
|
355
|
+
def read_structured_file(path: Path) -> Any:
    """Read a JSON, YAML or TOML file and return parsed data.

    Raises :class:`StructuredFileError` for unsupported suffixes and for
    the known I/O, encoding and parse failures; anything unexpected
    propagates unchanged.
    """

    try:
        parser = _get_parser(path.suffix.lower())
    except ValueError as e:
        raise StructuredFileError(path, e) from e

    try:
        return parser(path.read_text(encoding="utf-8"))
    except Exception as e:
        if not _is_structured_error(e):
            raise
        raise StructuredFileError(path, e) from e
|
|
370
|
+
|
|
371
|
+
|
|
372
|
+
def safe_write(
    path: str | Path,
    write: Callable[[Any], Any],
    *,
    mode: str = "w",
    encoding: str | None = "utf-8",
    atomic: bool = True,
    sync: bool | None = None,
    **open_kwargs: Any,
) -> None:
    """Write to ``path`` ensuring parent directory exists and handle errors.

    Parameters
    ----------
    path:
        Destination file path.
    write:
        Callback receiving the opened file object and performing the actual
        write.
    mode:
        File mode passed to :func:`open`. Text modes (default) use UTF-8
        encoding unless ``encoding`` is ``None``. When a binary mode is used
        (``'b'`` in ``mode``) no encoding parameter is supplied so
        ``write`` may write bytes.
    encoding:
        Encoding for text modes. Ignored for binary modes.
    atomic:
        When ``True`` (default) writes to a temporary file and atomically
        replaces the destination after flushing to disk. When ``False``
        writes directly to ``path`` without any atomicity guarantee.
    sync:
        When ``True`` flushes and fsyncs the file descriptor after writing.
        ``None`` uses ``atomic`` to determine syncing behaviour.

    Raises
    ------
    OSError, ValueError, TypeError
        Re-raised with a message identifying ``path`` when the write fails.
        If the original exception type cannot be re-constructed from a
        single message (e.g. ``UnicodeEncodeError``), an ``OSError`` with
        the same message is raised instead.
    """

    path = Path(path)
    path.parent.mkdir(parents=True, exist_ok=True)
    open_params = dict(mode=mode, **open_kwargs)
    if "b" not in mode and encoding is not None:
        open_params["encoding"] = encoding
    if sync is None:
        sync = atomic
    tmp_path: Path | None = None
    try:
        if atomic:
            # Create the temp file in the destination directory so that
            # os.replace() stays on one filesystem and remains atomic.
            tmp_fd = tempfile.NamedTemporaryFile(dir=path.parent, delete=False)
            tmp_path = Path(tmp_fd.name)
            tmp_fd.close()
            with open(tmp_path, **open_params) as fd:
                write(fd)
                if sync:
                    fd.flush()
                    os.fsync(fd.fileno())
            try:
                os.replace(tmp_path, path)
            except OSError as e:
                logger.error(
                    "Atomic replace failed for %s -> %s: %s", tmp_path, path, e
                )
                raise
        else:
            with open(path, **open_params) as fd:
                write(fd)
                if sync:
                    fd.flush()
                    os.fsync(fd.fileno())
    except (OSError, ValueError, TypeError) as e:
        message = f"Failed to write file {path}: {e}"
        # BUGFIX: not every exception type accepts a single message argument
        # (UnicodeEncodeError, a ValueError subclass, needs five) — the old
        # ``raise type(e)(message)`` then died with an unrelated TypeError.
        try:
            wrapped = type(e)(message)
        except TypeError:
            wrapped = OSError(message)
        raise wrapped from e
    finally:
        if tmp_path is not None:
            # Best-effort cleanup; after a successful replace the temp file
            # no longer exists, hence missing_ok.
            tmp_path.unlink(missing_ok=True)
|
|
443
|
+
|
|
444
|
+
|
|
445
|
+
# Public re-export surface of the structured I/O helpers.
__all__ = (
    "JsonDumpsParams",
    "DEFAULT_PARAMS",
    "clear_orjson_param_warnings",
    "json_dumps",
    "read_structured_file",
    "safe_write",
    "StructuredFileError",
    "TOMLDecodeError",
    "YAMLError",
)
|
|
456
|
+
|
|
@@ -2,8 +2,9 @@
|
|
|
2
2
|
|
|
3
3
|
from __future__ import annotations
|
|
4
4
|
|
|
5
|
-
from collections.abc import Iterable, Sequence
|
|
6
5
|
import math
|
|
6
|
+
from collections.abc import Iterable, Sequence
|
|
7
|
+
from typing import Any
|
|
7
8
|
|
|
8
9
|
__all__ = (
|
|
9
10
|
"clamp",
|
|
@@ -12,36 +13,31 @@ __all__ = (
|
|
|
12
13
|
"similarity_abs",
|
|
13
14
|
"kahan_sum_nd",
|
|
14
15
|
"angle_diff",
|
|
16
|
+
"angle_diff_array",
|
|
15
17
|
)
|
|
16
18
|
|
|
17
19
|
|
|
18
20
|
def clamp(x: float, a: float, b: float) -> float:
|
|
19
21
|
"""Return ``x`` clamped to the ``[a, b]`` interval."""
|
|
22
|
+
|
|
20
23
|
return max(a, min(b, x))
|
|
21
24
|
|
|
22
25
|
|
|
23
26
|
def clamp01(x: float) -> float:
|
|
24
27
|
"""Clamp ``x`` to the ``[0,1]`` interval."""
|
|
28
|
+
|
|
25
29
|
return clamp(float(x), 0.0, 1.0)
|
|
26
30
|
|
|
27
31
|
|
|
28
32
|
def within_range(val: float, lower: float, upper: float, tol: float = 1e-9) -> bool:
|
|
29
|
-
"""Return ``True`` if ``val`` lies in ``[lower, upper]`` within ``tol``.
|
|
30
|
-
|
|
31
|
-
The comparison uses absolute differences instead of :func:`math.isclose`.
|
|
32
|
-
"""
|
|
33
|
+
"""Return ``True`` if ``val`` lies in ``[lower, upper]`` within ``tol``."""
|
|
33
34
|
|
|
34
35
|
v = float(val)
|
|
35
36
|
return lower <= v <= upper or abs(v - lower) <= tol or abs(v - upper) <= tol
|
|
36
37
|
|
|
37
38
|
|
|
38
39
|
def _norm01(x: float, lo: float, hi: float) -> float:
|
|
39
|
-
"""Normalize ``x`` to the unit interval given bounds.
|
|
40
|
-
|
|
41
|
-
``lo`` and ``hi`` delimit the original value range. When ``hi`` is not
|
|
42
|
-
greater than ``lo`` the function returns ``0.0`` to avoid division by
|
|
43
|
-
zero. The result is clamped to ``[0,1]``.
|
|
44
|
-
"""
|
|
40
|
+
"""Normalize ``x`` to the unit interval given bounds."""
|
|
45
41
|
|
|
46
42
|
if hi <= lo:
|
|
47
43
|
return 0.0
|
|
@@ -49,23 +45,14 @@ def _norm01(x: float, lo: float, hi: float) -> float:
|
|
|
49
45
|
|
|
50
46
|
|
|
51
47
|
def similarity_abs(a: float, b: float, lo: float, hi: float) -> float:
|
|
52
|
-
"""Return absolute similarity of ``a`` and ``b`` over ``[lo, hi]``.
|
|
53
|
-
|
|
54
|
-
It computes ``1`` minus the normalized absolute difference between
|
|
55
|
-
``a`` and ``b``. Values are scaled using :func:`_norm01` so the result
|
|
56
|
-
falls within ``[0,1]``.
|
|
57
|
-
"""
|
|
48
|
+
"""Return absolute similarity of ``a`` and ``b`` over ``[lo, hi]``."""
|
|
58
49
|
|
|
59
50
|
return 1.0 - _norm01(abs(float(a) - float(b)), 0.0, hi - lo)
|
|
60
51
|
|
|
61
|
-
def kahan_sum_nd(
|
|
62
|
-
values: Iterable[Sequence[float]], dims: int
|
|
63
|
-
) -> tuple[float, ...]:
|
|
64
|
-
"""Return compensated sums of ``values`` with ``dims`` components.
|
|
65
52
|
|
|
66
|
-
|
|
67
|
-
|
|
68
|
-
|
|
53
|
+
def kahan_sum_nd(values: Iterable[Sequence[float]], dims: int) -> tuple[float, ...]:
|
|
54
|
+
"""Return compensated sums of ``values`` with ``dims`` components."""
|
|
55
|
+
|
|
69
56
|
if dims < 1:
|
|
70
57
|
raise ValueError("dims must be >= 1")
|
|
71
58
|
totals = [0.0] * dims
|
|
@@ -84,4 +71,44 @@ def kahan_sum_nd(
|
|
|
84
71
|
|
|
85
72
|
def angle_diff(a: float, b: float) -> float:
|
|
86
73
|
"""Return the minimal difference between two angles in radians."""
|
|
74
|
+
|
|
87
75
|
return (float(a) - float(b) + math.pi) % math.tau - math.pi
|
|
76
|
+
|
|
77
|
+
|
|
78
|
+
def angle_diff_array(
    a: Sequence[float] | "np.ndarray",
    b: Sequence[float] | "np.ndarray",
    *,
    np: Any,
    out: "np.ndarray | None" = None,
    where: "np.ndarray | None" = None,
) -> "np.ndarray":
    """Vectorised :func:`angle_diff` compatible with NumPy arrays.

    Computes ``(a - b + pi) % tau - pi`` elementwise, i.e. the minimal
    signed angular difference in ``(-pi, pi]``.

    Parameters
    ----------
    a, b:
        Angle arrays (radians); broadcast against each other.
    np:
        The NumPy module to operate with (injected by callers).
    out:
        Optional pre-allocated output matching the broadcast shape.
    where:
        Optional boolean mask; unmasked entries are left at ``0.0`` when
        ``out`` is allocated here, or untouched when ``out`` is supplied.

    Raises
    ------
    TypeError
        If ``np`` is ``None``.
    ValueError
        If ``out`` or ``where`` does not match the broadcast shape.
    """

    if np is None:
        raise TypeError("angle_diff_array requires a NumPy module")

    kwargs = {"where": where} if where is not None else {}
    minuend = np.asarray(a, dtype=float)
    subtrahend = np.asarray(b, dtype=float)
    # BUGFIX: allocate/validate against the *broadcast* shape of both
    # inputs, not minuend.shape — otherwise a smaller ``a`` with a larger
    # ``b`` failed despite the error message promising broadcast support.
    shape = np.broadcast(minuend, subtrahend).shape
    if out is None:
        out = np.empty(shape, dtype=float)
        if where is not None:
            out.fill(0.0)
    else:
        if getattr(out, "shape", None) != shape:
            raise ValueError("out must match the broadcasted shape of inputs")

    np.subtract(minuend, subtrahend, out=out, **kwargs)
    np.add(out, math.pi, out=out, **kwargs)
    if where is not None:
        mask = np.asarray(where, dtype=bool)
        if mask.shape != out.shape:
            raise ValueError("where mask must match the broadcasted shape of inputs")
        # ufunc ``where`` is unreliable for remainder's unmasked lanes, so
        # apply the modulo only to the selected entries.
        selected = out[mask]
        if selected.size:
            out[mask] = np.remainder(selected, math.tau)
    else:
        np.remainder(out, math.tau, out=out)
    np.subtract(out, math.pi, out=out, **kwargs)
    return out
|
tnfr/utils/numeric.pyi
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
from collections.abc import Iterable, Sequence
|
|
4
|
+
from typing import Any
|
|
5
|
+
|
|
6
|
+
__all__: tuple[str, ...]
|
|
7
|
+
|
|
8
|
+
def clamp(x: float, a: float, b: float) -> float: ...
|
|
9
|
+
def clamp01(x: float) -> float: ...
|
|
10
|
+
def within_range(val: float, lower: float, upper: float, tol: float = ...) -> bool: ...
|
|
11
|
+
def similarity_abs(a: float, b: float, lo: float, hi: float) -> float: ...
|
|
12
|
+
def kahan_sum_nd(values: Iterable[Sequence[float]], dims: int) -> tuple[float, ...]: ...
|
|
13
|
+
def angle_diff(a: float, b: float) -> float: ...
|
|
14
|
+
def angle_diff_array(
|
|
15
|
+
a: Sequence[float] | Any,
|
|
16
|
+
b: Sequence[float] | Any,
|
|
17
|
+
*,
|
|
18
|
+
np: Any,
|
|
19
|
+
out: Any | None = ...,
|
|
20
|
+
where: Any | None = ...,
|
|
21
|
+
) -> Any: ...
|