tnfr 4.5.1__py3-none-any.whl → 6.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- tnfr/__init__.py +270 -90
- tnfr/__init__.pyi +40 -0
- tnfr/_compat.py +11 -0
- tnfr/_version.py +7 -0
- tnfr/_version.pyi +7 -0
- tnfr/alias.py +631 -0
- tnfr/alias.pyi +140 -0
- tnfr/cache.py +732 -0
- tnfr/cache.pyi +232 -0
- tnfr/callback_utils.py +381 -0
- tnfr/callback_utils.pyi +105 -0
- tnfr/cli/__init__.py +89 -0
- tnfr/cli/__init__.pyi +47 -0
- tnfr/cli/arguments.py +199 -0
- tnfr/cli/arguments.pyi +33 -0
- tnfr/cli/execution.py +322 -0
- tnfr/cli/execution.pyi +80 -0
- tnfr/cli/utils.py +34 -0
- tnfr/cli/utils.pyi +8 -0
- tnfr/config/__init__.py +12 -0
- tnfr/config/__init__.pyi +8 -0
- tnfr/config/constants.py +104 -0
- tnfr/config/constants.pyi +12 -0
- tnfr/config/init.py +36 -0
- tnfr/config/init.pyi +8 -0
- tnfr/config/operator_names.py +106 -0
- tnfr/config/operator_names.pyi +28 -0
- tnfr/config/presets.py +104 -0
- tnfr/config/presets.pyi +7 -0
- tnfr/constants/__init__.py +228 -0
- tnfr/constants/__init__.pyi +104 -0
- tnfr/constants/core.py +158 -0
- tnfr/constants/core.pyi +17 -0
- tnfr/constants/init.py +31 -0
- tnfr/constants/init.pyi +12 -0
- tnfr/constants/metric.py +102 -0
- tnfr/constants/metric.pyi +19 -0
- tnfr/constants_glyphs.py +16 -0
- tnfr/constants_glyphs.pyi +12 -0
- tnfr/dynamics/__init__.py +136 -0
- tnfr/dynamics/__init__.pyi +83 -0
- tnfr/dynamics/adaptation.py +201 -0
- tnfr/dynamics/aliases.py +22 -0
- tnfr/dynamics/coordination.py +343 -0
- tnfr/dynamics/dnfr.py +2315 -0
- tnfr/dynamics/dnfr.pyi +33 -0
- tnfr/dynamics/integrators.py +561 -0
- tnfr/dynamics/integrators.pyi +35 -0
- tnfr/dynamics/runtime.py +521 -0
- tnfr/dynamics/sampling.py +34 -0
- tnfr/dynamics/sampling.pyi +7 -0
- tnfr/dynamics/selectors.py +680 -0
- tnfr/execution.py +216 -0
- tnfr/execution.pyi +65 -0
- tnfr/flatten.py +283 -0
- tnfr/flatten.pyi +28 -0
- tnfr/gamma.py +320 -89
- tnfr/gamma.pyi +40 -0
- tnfr/glyph_history.py +337 -0
- tnfr/glyph_history.pyi +53 -0
- tnfr/grammar.py +23 -153
- tnfr/grammar.pyi +13 -0
- tnfr/helpers/__init__.py +151 -0
- tnfr/helpers/__init__.pyi +66 -0
- tnfr/helpers/numeric.py +88 -0
- tnfr/helpers/numeric.pyi +12 -0
- tnfr/immutable.py +214 -0
- tnfr/immutable.pyi +37 -0
- tnfr/initialization.py +199 -0
- tnfr/initialization.pyi +73 -0
- tnfr/io.py +311 -0
- tnfr/io.pyi +11 -0
- tnfr/locking.py +37 -0
- tnfr/locking.pyi +7 -0
- tnfr/metrics/__init__.py +41 -0
- tnfr/metrics/__init__.pyi +20 -0
- tnfr/metrics/coherence.py +1469 -0
- tnfr/metrics/common.py +149 -0
- tnfr/metrics/common.pyi +15 -0
- tnfr/metrics/core.py +259 -0
- tnfr/metrics/core.pyi +13 -0
- tnfr/metrics/diagnosis.py +840 -0
- tnfr/metrics/diagnosis.pyi +89 -0
- tnfr/metrics/export.py +151 -0
- tnfr/metrics/glyph_timing.py +369 -0
- tnfr/metrics/reporting.py +152 -0
- tnfr/metrics/reporting.pyi +12 -0
- tnfr/metrics/sense_index.py +294 -0
- tnfr/metrics/sense_index.pyi +9 -0
- tnfr/metrics/trig.py +216 -0
- tnfr/metrics/trig.pyi +12 -0
- tnfr/metrics/trig_cache.py +105 -0
- tnfr/metrics/trig_cache.pyi +10 -0
- tnfr/node.py +255 -177
- tnfr/node.pyi +161 -0
- tnfr/observers.py +154 -150
- tnfr/observers.pyi +46 -0
- tnfr/ontosim.py +135 -134
- tnfr/ontosim.pyi +33 -0
- tnfr/operators/__init__.py +452 -0
- tnfr/operators/__init__.pyi +31 -0
- tnfr/operators/definitions.py +181 -0
- tnfr/operators/definitions.pyi +92 -0
- tnfr/operators/jitter.py +266 -0
- tnfr/operators/jitter.pyi +11 -0
- tnfr/operators/registry.py +80 -0
- tnfr/operators/registry.pyi +15 -0
- tnfr/operators/remesh.py +569 -0
- tnfr/presets.py +10 -23
- tnfr/presets.pyi +7 -0
- tnfr/py.typed +0 -0
- tnfr/rng.py +440 -0
- tnfr/rng.pyi +14 -0
- tnfr/selector.py +217 -0
- tnfr/selector.pyi +19 -0
- tnfr/sense.py +307 -142
- tnfr/sense.pyi +30 -0
- tnfr/structural.py +69 -164
- tnfr/structural.pyi +46 -0
- tnfr/telemetry/__init__.py +13 -0
- tnfr/telemetry/verbosity.py +37 -0
- tnfr/tokens.py +61 -0
- tnfr/tokens.pyi +41 -0
- tnfr/trace.py +520 -95
- tnfr/trace.pyi +68 -0
- tnfr/types.py +382 -17
- tnfr/types.pyi +145 -0
- tnfr/utils/__init__.py +158 -0
- tnfr/utils/__init__.pyi +133 -0
- tnfr/utils/cache.py +755 -0
- tnfr/utils/cache.pyi +156 -0
- tnfr/utils/data.py +267 -0
- tnfr/utils/data.pyi +73 -0
- tnfr/utils/graph.py +87 -0
- tnfr/utils/graph.pyi +10 -0
- tnfr/utils/init.py +746 -0
- tnfr/utils/init.pyi +85 -0
- tnfr/utils/io.py +157 -0
- tnfr/utils/io.pyi +10 -0
- tnfr/utils/validators.py +130 -0
- tnfr/utils/validators.pyi +19 -0
- tnfr/validation/__init__.py +25 -0
- tnfr/validation/__init__.pyi +17 -0
- tnfr/validation/compatibility.py +59 -0
- tnfr/validation/compatibility.pyi +8 -0
- tnfr/validation/grammar.py +149 -0
- tnfr/validation/grammar.pyi +11 -0
- tnfr/validation/rules.py +194 -0
- tnfr/validation/rules.pyi +18 -0
- tnfr/validation/syntax.py +151 -0
- tnfr/validation/syntax.pyi +7 -0
- tnfr-6.0.0.dist-info/METADATA +135 -0
- tnfr-6.0.0.dist-info/RECORD +157 -0
- tnfr/cli.py +0 -322
- tnfr/config.py +0 -41
- tnfr/constants.py +0 -277
- tnfr/dynamics.py +0 -814
- tnfr/helpers.py +0 -264
- tnfr/main.py +0 -47
- tnfr/metrics.py +0 -597
- tnfr/operators.py +0 -525
- tnfr/program.py +0 -176
- tnfr/scenarios.py +0 -34
- tnfr/validators.py +0 -38
- tnfr-4.5.1.dist-info/METADATA +0 -221
- tnfr-4.5.1.dist-info/RECORD +0 -28
- {tnfr-4.5.1.dist-info → tnfr-6.0.0.dist-info}/WHEEL +0 -0
- {tnfr-4.5.1.dist-info → tnfr-6.0.0.dist-info}/entry_points.txt +0 -0
- {tnfr-4.5.1.dist-info → tnfr-6.0.0.dist-info}/licenses/LICENSE.md +0 -0
- {tnfr-4.5.1.dist-info → tnfr-6.0.0.dist-info}/top_level.txt +0 -0
tnfr/utils/cache.pyi
ADDED
|
@@ -0,0 +1,156 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import threading
|
|
4
|
+
from collections.abc import Callable, Hashable, Iterable, Iterator, Mapping, MutableMapping
|
|
5
|
+
from typing import Any, ContextManager, Generic, TypeVar
|
|
6
|
+
|
|
7
|
+
import networkx as nx
|
|
8
|
+
|
|
9
|
+
from ..cache import CacheCapacityConfig, CacheManager
|
|
10
|
+
from ..types import GraphLike, NodeId, TNFRGraph, TimingContext
|
|
11
|
+
|
|
12
|
+
# Type variables for the generic cache containers declared below.
K = TypeVar("K", bound=Hashable)  # cache key type; must be hashable
V = TypeVar("V")  # cached value type
T = TypeVar("T")  # result type produced by a cache builder callable

__all__ = (
    "EdgeCacheManager",
    "NODE_SET_CHECKSUM_KEY",
    "cached_node_list",
    "cached_nodes_and_A",
    "clear_node_repr_cache",
    "configure_graph_cache_limits",
    "edge_version_cache",
    "edge_version_update",
    "ensure_node_index_map",
    "ensure_node_offset_map",
    "get_graph_version",
    "increment_edge_version",
    "increment_graph_version",
    "node_set_checksum",
    "stable_json",
)

# Graph-metadata key under which the node-set checksum is stored
# (see node_set_checksum below).
NODE_SET_CHECKSUM_KEY: str
|
|
35
|
+
|
|
36
|
+
|
|
37
|
+
class LRUCache(MutableMapping[K, V], Generic[K, V]):
    """Mutable mapping bounded by ``maxsize``.

    NOTE(review): the name suggests least-recently-used eviction once
    ``maxsize`` is exceeded — confirm against the implementation module.
    Not listed in ``__all__``; exposed here for type checking only.
    """

    def __init__(self, maxsize: int = ...) -> None: ...
    def __getitem__(self, __key: K) -> V: ...
    def __setitem__(self, __key: K, __value: V) -> None: ...
    def __delitem__(self, __key: K) -> None: ...
    def __iter__(self) -> Iterator[K]: ...
    def __len__(self) -> int: ...
|
|
49
|
+
|
|
50
|
+
|
|
51
|
+
class EdgeCacheState:
    """Typed view of the per-graph edge-cache storage bundle."""

    # Cache storage keyed by arbitrary hashable cache keys.
    cache: MutableMapping[Hashable, Any]
    # Per-key re-entrant locks guarding concurrent access to ``cache``.
    locks: MutableMapping[Hashable, threading.RLock]
    # Maximum number of cache entries; ``None`` means unbounded.
    max_entries: int | None
|
|
55
|
+
|
|
56
|
+
|
|
57
|
+
class EdgeCacheManager:
    """Coordinate a graph's edge cache: state, locking and telemetry."""

    # Graph-metadata key under which the EdgeCacheState bundle is stored.
    _STATE_KEY: str

    def __init__(self, graph: MutableMapping[str, Any]) -> None: ...

    # Telemetry hooks for cache events; ``track_metrics`` presumably toggles
    # metric accumulation — confirm against the implementation module.
    def record_hit(self) -> None: ...
    def record_miss(self, *, track_metrics: bool = ...) -> None: ...
    def record_eviction(self, *, track_metrics: bool = ...) -> None: ...

    # Timing helper; TimingContext is defined in tnfr.types.
    def timer(self) -> TimingContext: ...

    # State lifecycle helpers. ``max_entries`` accepts a sentinel ``object``
    # in addition to int/None (mirrors CacheManager._MISSING at runtime).
    def _default_state(self) -> EdgeCacheState: ...
    def resolve_max_entries(self, max_entries: int | None | object) -> int | None: ...
    def _build_state(self, max_entries: int | None) -> EdgeCacheState: ...
    def _ensure_state(
        self, state: EdgeCacheState | None, max_entries: int | None | object
    ) -> EdgeCacheState: ...
    def _reset_state(self, state: EdgeCacheState | None) -> EdgeCacheState: ...

    # Return the (cache, locks) pair; either element may be None when
    # ``create`` is false and no state exists yet.
    def get_cache(
        self,
        max_entries: int | None | object,
        *,
        create: bool = ...,
    ) -> tuple[
        MutableMapping[Hashable, Any] | None,
        MutableMapping[Hashable, threading.RLock] | None,
    ]: ...

    def clear(self) -> None: ...
|
|
93
|
+
|
|
94
|
+
|
|
95
|
+
# Counter helpers over graph-level metadata entries.
def get_graph_version(graph: Any, key: str, default: int = ...) -> int: ...


def increment_graph_version(graph: Any, key: str) -> int: ...


# Deterministic JSON serialization, suitable for checksums and cache keys.
def stable_json(obj: Any) -> str: ...


def clear_node_repr_cache() -> None: ...


# Checksum over ``G``'s node set; ``store`` presumably persists the result
# under NODE_SET_CHECKSUM_KEY in the graph metadata — confirm in the
# implementation module.
def node_set_checksum(
    G: nx.Graph,
    nodes: Iterable[Any] | None = ...,
    *,
    presorted: bool = ...,
    store: bool = ...,
) -> str: ...


def cached_node_list(G: nx.Graph) -> tuple[Any, ...]: ...


# Cached node -> index/offset mappings for array-based computations.
def ensure_node_index_map(G: TNFRGraph) -> dict[NodeId, int]: ...


def ensure_node_offset_map(G: TNFRGraph) -> dict[NodeId, int]: ...
|
|
123
|
+
|
|
124
|
+
|
|
125
|
+
# Configure capacity limits for the graph's caches and return the resulting
# config. ``default_capacity`` defaults to a missing-value sentinel at
# runtime; per PEP 484 stub convention the default is spelled ``...`` here
# (the previous ``CacheManager._MISSING`` referenced a private runtime
# attribute, which stubs must not do).
def configure_graph_cache_limits(
    G: GraphLike | TNFRGraph | MutableMapping[str, Any],
    *,
    default_capacity: int | None | object = ...,
    overrides: Mapping[str, int | None] | None = ...,
    replace_overrides: bool = ...,
) -> CacheCapacityConfig: ...
|
|
132
|
+
|
|
133
|
+
|
|
134
|
+
# Bump the edge version counter for ``G``.
def increment_edge_version(G: Any) -> None: ...


# Return (building on a miss) the value cached under ``key`` for the current
# edge version of ``G``. ``max_entries`` defaults to a missing-value sentinel
# at runtime; per PEP 484 stub convention the default is spelled ``...`` here
# (the previous ``CacheManager._MISSING`` referenced a private runtime
# attribute, which stubs must not do).
def edge_version_cache(
    G: Any,
    key: Hashable,
    builder: Callable[[], T],
    *,
    max_entries: int | None | object = ...,
) -> T: ...
|
|
144
|
+
|
|
145
|
+
|
|
146
|
+
# Return the cached node tuple together with an adjacency structure ``A``.
# ``require_numpy``/``prefer_sparse`` presumably select the backing matrix
# representation — confirm against the implementation module.
def cached_nodes_and_A(
    G: nx.Graph,
    *,
    cache_size: int | None = ...,
    require_numpy: bool = ...,
    prefer_sparse: bool = ...,
    nodes: tuple[Any, ...] | None = ...,
) -> tuple[tuple[Any, ...], Any]: ...


# Context manager scoping an edge mutation; presumably bumps the edge
# version on exit (see increment_edge_version) — confirm in implementation.
def edge_version_update(G: TNFRGraph) -> ContextManager[None]: ...
|
tnfr/utils/data.py
ADDED
|
@@ -0,0 +1,267 @@
|
|
|
1
|
+
"""Utilities for manipulating collections and scalar values within TNFR."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import logging
|
|
6
|
+
from collections import deque
|
|
7
|
+
from collections.abc import Collection, Iterable, Mapping, Sequence
|
|
8
|
+
from itertools import islice
|
|
9
|
+
from typing import Any, Callable, Iterator, TypeVar, cast
|
|
10
|
+
|
|
11
|
+
from ..helpers.numeric import kahan_sum_nd
|
|
12
|
+
from .init import get_logger, warn_once as _warn_once_factory
|
|
13
|
+
|
|
14
|
+
T = TypeVar("T")  # generic element type used by ensure_collection/convert_value

# Separate loggers: one for collection-level warnings, one for scalar
# conversion failures.
_collections_logger = get_logger("tnfr.utils.data.collections")
_value_logger = get_logger("tnfr.utils.data")

# Types treated as atomic scalars (not sequences) when flattening/collecting.
STRING_TYPES = (str, bytes, bytearray)

# printf-style template; filled with the mapping of offending weights.
NEGATIVE_WEIGHTS_MSG = "Negative weights detected: %s"

# Upper bound on distinct payloads remembered by the WarnOnce deduplicator.
_MAX_NEGATIVE_WARN_ONCE = 1024

__all__ = (
    "convert_value",
    "MAX_MATERIALIZE_DEFAULT",
    "normalize_materialize_limit",
    "is_non_string_sequence",
    "flatten_structure",
    "STRING_TYPES",
    "ensure_collection",
    "normalize_weights",
    "negative_weights_warn_once",
    "normalize_counter",
    "mix_groups",
)
|
|
38
|
+
|
|
39
|
+
|
|
40
|
+
def convert_value(
    value: Any,
    conv: Callable[[Any], T],
    *,
    strict: bool = False,
    key: str | None = None,
    log_level: int | None = None,
) -> tuple[bool, T | None]:
    """Convert ``value`` with ``conv``, reporting failures instead of raising.

    Returns ``(True, converted)`` on success. On ``ValueError`` or
    ``TypeError`` the failure is logged (or re-raised when ``strict``) and
    ``(False, None)`` is returned. ``key`` names the value in the log line;
    ``log_level`` overrides the default DEBUG level.
    """

    try:
        converted = conv(value)
    except (ValueError, TypeError) as exc:
        if strict:
            raise
        # Default to DEBUG so routine conversion noise stays quiet.
        level = logging.DEBUG if log_level is None else log_level
        if key is None:
            _value_logger.log(level, "Could not convert value: %s", exc)
        else:
            _value_logger.log(level, "Could not convert value for %r: %s", key, exc)
        return False, None
    return True, converted
|
|
61
|
+
|
|
62
|
+
|
|
63
|
+
def negative_weights_warn_once(
    *, maxsize: int = _MAX_NEGATIVE_WARN_ONCE
) -> Callable[[Mapping[str, float]], None]:
    """Build a deduplicating warner for negative-weight messages.

    The returned callable logs :data:`NEGATIVE_WEIGHTS_MSG` on the
    collections logger, remembering up to ``maxsize`` previously seen
    payloads so repeats are suppressed.
    """

    return _warn_once_factory(
        _collections_logger,
        NEGATIVE_WEIGHTS_MSG,
        maxsize=maxsize,
    )
|
|
69
|
+
|
|
70
|
+
|
|
71
|
+
def _log_negative_weights(negatives: Mapping[str, float]) -> None:
    """Log negative weight warnings without deduplicating keys."""

    # Unconditional warning: unlike the WarnOnce variant built by
    # negative_weights_warn_once, repeated payloads are logged every time.
    _collections_logger.warning(NEGATIVE_WEIGHTS_MSG, negatives)
|
|
75
|
+
|
|
76
|
+
|
|
77
|
+
def _resolve_negative_warn_handler(
|
|
78
|
+
warn_once: bool | Callable[[Mapping[str, float]], None]
|
|
79
|
+
) -> Callable[[Mapping[str, float]], None]:
|
|
80
|
+
"""Return a callable that logs negative weight warnings."""
|
|
81
|
+
|
|
82
|
+
if callable(warn_once):
|
|
83
|
+
return warn_once
|
|
84
|
+
if warn_once:
|
|
85
|
+
return negative_weights_warn_once()
|
|
86
|
+
return _log_negative_weights
|
|
87
|
+
|
|
88
|
+
|
|
89
|
+
def is_non_string_sequence(obj: Any) -> bool:
    """Report whether ``obj`` iterates like a container of items.

    True for any ``Iterable`` that is neither text-like (``str``/``bytes``/
    ``bytearray``) nor a ``Mapping``; those are treated as atomic values.
    """

    if not isinstance(obj, Iterable):
        return False
    return not isinstance(obj, (str, bytes, bytearray, Mapping))
|
|
93
|
+
|
|
94
|
+
|
|
95
|
+
def flatten_structure(
    obj: Any,
    *,
    expand: Callable[[Any], Iterable[Any] | None] | None = None,
) -> Iterator[Any]:
    """Yield leaf items from ``obj`` following breadth-first semantics.

    Non-string, non-mapping iterables are expanded; everything else is
    yielded as a leaf. Each container object is visited at most once (by
    identity), which also guards against reference cycles. When provided,
    ``expand`` may replace any item with an iterable of substitutes before
    the default expansion rule is consulted.
    """

    queue: deque[Any] = deque((obj,))
    visited: set[int] = set()
    while queue:
        current = queue.pop()
        marker = id(current)
        if marker in visited:
            continue
        if expand is not None:
            substitutes = expand(current)
            if substitutes is not None:
                visited.add(marker)
                queue.extendleft(substitutes)
                continue
        if is_non_string_sequence(current):
            visited.add(marker)
            # extendleft reverses, and pop() takes from the right, so the
            # children come back out in their original order.
            queue.extendleft(current)
        else:
            yield current
|
|
120
|
+
|
|
121
|
+
|
|
122
|
+
MAX_MATERIALIZE_DEFAULT: int = 1000
"""Default materialization limit used by :func:`ensure_collection`."""


def normalize_materialize_limit(max_materialize: int | None) -> int | None:
    """Validate ``max_materialize`` and coerce it to a usable integer limit.

    ``None`` disables the limit entirely; any other value is coerced with
    :class:`int` and must be non-negative, otherwise :class:`ValueError`
    is raised.
    """

    if max_materialize is None:
        return None
    coerced = int(max_materialize)
    if coerced < 0:
        raise ValueError("'max_materialize' must be non-negative")
    return coerced
|
|
135
|
+
|
|
136
|
+
|
|
137
|
+
def ensure_collection(
    it: Iterable[T],
    *,
    max_materialize: int | None = MAX_MATERIALIZE_DEFAULT,
    error_msg: str | None = None,
) -> Collection[T]:
    """Return ``it`` as a :class:`Collection`, materializing when needed.

    Existing collections are handed back unchanged, except string-likes,
    which are wrapped in a one-element tuple so they behave as atoms. Lazy
    iterables are materialized up to ``max_materialize`` items; producing
    more raises :class:`ValueError` (``error_msg`` overrides the default
    message). ``max_materialize=None`` materializes without bound.
    """

    if isinstance(it, Collection):
        # Strings are collections of characters, but callers treat them
        # as scalar values.
        return (cast(T, it),) if isinstance(it, STRING_TYPES) else it

    if not isinstance(it, Iterable):
        raise TypeError(f"{it!r} is not iterable")

    limit = normalize_materialize_limit(max_materialize)
    if limit is None:
        return tuple(it)
    if limit == 0:
        return ()

    # Pull one extra item so an overflow is detectable.
    materialized = tuple(islice(it, limit + 1))
    if len(materialized) <= limit:
        return materialized
    preview = ", ".join(repr(x) for x in materialized[:3])
    raise ValueError(
        error_msg
        or f"Iterable produced {len(materialized)} items, exceeds limit {limit}; first items: [{preview}]"
    )
|
|
168
|
+
|
|
169
|
+
|
|
170
|
+
def _convert_and_validate_weights(
    dict_like: Mapping[str, Any],
    keys: Iterable[str] | Sequence[str],
    default: float,
    *,
    error_on_conversion: bool,
    error_on_negative: bool,
    warn_once: bool | Callable[[Mapping[str, float]], None],
) -> tuple[dict[str, float], list[str], float]:
    """Convert raw weights to floats and police negative entries.

    Returns the converted weight mapping, the deduplicated key order, and
    the compensated (Kahan) total. Negative weights either raise (when
    ``error_on_negative``) or are warned about, clamped to ``0.0`` and
    removed from the total.
    """

    ordered_keys = list(dict.fromkeys(keys))
    fallback = float(default)

    def _to_float(name: str) -> float:
        # Fall back to the default on conversion failure unless strict.
        ok, converted = convert_value(
            dict_like.get(name, fallback),
            float,
            strict=error_on_conversion,
            key=name,
            log_level=logging.WARNING,
        )
        if not ok:
            return fallback
        return cast(float, converted)

    weights = {name: _to_float(name) for name in ordered_keys}
    negatives = {name: value for name, value in weights.items() if value < 0}
    total = kahan_sum_nd(((value,) for value in weights.values()), dims=1)[0]

    if negatives:
        if error_on_negative:
            raise ValueError(NEGATIVE_WEIGHTS_MSG % negatives)
        _resolve_negative_warn_handler(warn_once)(negatives)
        # Clamp offending weights and remove their contribution from the sum.
        for name, value in negatives.items():
            weights[name] = 0.0
            total -= value

    return weights, ordered_keys, total
|
|
208
|
+
|
|
209
|
+
|
|
210
|
+
def normalize_weights(
    dict_like: Mapping[str, Any],
    keys: Iterable[str] | Sequence[str],
    default: float = 0.0,
    *,
    error_on_negative: bool = False,
    warn_once: bool | Callable[[Mapping[str, float]], None] = True,
    error_on_conversion: bool = False,
) -> dict[str, float]:
    """Normalize ``keys`` in mapping ``dict_like`` so their sum is 1.

    Missing or unconvertible entries fall back to ``default``. When the
    accumulated total is not positive, a uniform distribution over the
    keys is returned instead. An empty key set yields an empty dict.
    """

    weights, ordered_keys, total = _convert_and_validate_weights(
        dict_like,
        keys,
        default,
        error_on_conversion=error_on_conversion,
        error_on_negative=error_on_negative,
        warn_once=warn_once,
    )
    if not ordered_keys:
        return {}
    if total > 0:
        return {name: value / total for name, value in weights.items()}
    # Degenerate total: spread the mass evenly.
    share = 1.0 / len(ordered_keys)
    return dict.fromkeys(ordered_keys, share)
|
|
235
|
+
|
|
236
|
+
|
|
237
|
+
def normalize_counter(
    counts: Mapping[str, float | int],
) -> tuple[dict[str, float], float]:
    """Turn raw counts into proportions, also returning the grand total.

    A non-positive total yields ``({}, 0)`` so callers can treat empty or
    degenerate counters uniformly. Zero-valued entries are dropped from
    the returned distribution.
    """

    total = kahan_sum_nd(((value,) for value in counts.values()), dims=1)[0]
    if total <= 0:
        return {}, 0
    proportions = {name: value / total for name, value in counts.items() if value}
    return proportions, total
|
|
247
|
+
|
|
248
|
+
|
|
249
|
+
def mix_groups(
    dist: Mapping[str, float],
    groups: Mapping[str, Iterable[str]],
    *,
    prefix: str = "_",
) -> dict[str, float]:
    """Aggregate values of ``dist`` according to ``groups``.

    Each group label gains a ``prefix``-ed entry holding the compensated
    sum of its members' values; members missing from ``dist`` contribute
    ``0.0``. The original entries of ``dist`` are preserved.
    """

    combined: dict[str, float] = dict(dist)
    for label, members in groups.items():
        subtotal = kahan_sum_nd(
            ((dist.get(member, 0.0),) for member in members),
            dims=1,
        )[0]
        combined[f"{prefix}{label}"] = subtotal
    return combined
|
tnfr/utils/data.pyi
ADDED
|
@@ -0,0 +1,73 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
from collections.abc import Collection, Iterable, Iterator, Mapping, Sequence
|
|
4
|
+
from typing import Any, Callable, TypeVar
|
|
5
|
+
|
|
6
|
+
T = TypeVar("T")  # generic element type mirrored from the implementation module

# Types treated as atomic scalars by flatten_structure/ensure_collection.
STRING_TYPES: tuple[type[str] | type[bytes] | type[bytearray], ...]
# Default materialization limit used by ensure_collection.
MAX_MATERIALIZE_DEFAULT: int
# printf-style template filled with the offending-weights mapping.
NEGATIVE_WEIGHTS_MSG: str

__all__: tuple[str, ...]


# Convert ``value`` with ``conv``; returns (ok, result-or-None).
def convert_value(
    value: Any,
    conv: Callable[[Any], T],
    *,
    strict: bool = ...,
    key: str | None = ...,
    log_level: int | None = ...,
) -> tuple[bool, T | None]: ...


# Factory for a deduplicating negative-weight warner.
def negative_weights_warn_once(
    *,
    maxsize: int = ...,
) -> Callable[[Mapping[str, float]], None]: ...


def is_non_string_sequence(obj: Any) -> bool: ...


# Breadth-first leaf iterator; ``expand`` may substitute items.
def flatten_structure(
    obj: Any,
    *,
    expand: Callable[[Any], Iterable[Any] | None] | None = ...,
) -> Iterator[Any]: ...


def normalize_materialize_limit(max_materialize: int | None) -> int | None: ...


# Materialize ``it`` into a Collection, bounded by ``max_materialize``.
def ensure_collection(
    it: Iterable[T],
    *,
    max_materialize: int | None = ...,
    error_msg: str | None = ...,
) -> Collection[T]: ...


# Normalize selected weights so they sum to 1 (uniform on degenerate total).
def normalize_weights(
    dict_like: Mapping[str, Any],
    keys: Iterable[str] | Sequence[str],
    default: float = ...,
    *,
    error_on_negative: bool = ...,
    warn_once: bool | Callable[[Mapping[str, float]], None] = ...,
    error_on_conversion: bool = ...,
) -> dict[str, float]: ...


# Counts -> (proportions, total); ({}, 0) on non-positive total.
def normalize_counter(
    counts: Mapping[str, float | int],
) -> tuple[dict[str, float], float]: ...


# Add prefix-ed per-group aggregate entries to a copy of ``dist``.
def mix_groups(
    dist: Mapping[str, float],
    groups: Mapping[str, Iterable[str]],
    *,
    prefix: str = ...,
) -> dict[str, float]: ...
|
tnfr/utils/graph.py
ADDED
|
@@ -0,0 +1,87 @@
|
|
|
1
|
+
"""Utilities for graph-level bookkeeping shared by TNFR components."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import warnings
|
|
6
|
+
from types import MappingProxyType
|
|
7
|
+
from typing import Any, Mapping, MutableMapping
|
|
8
|
+
|
|
9
|
+
from ..types import GraphLike, TNFRGraph
|
|
10
|
+
|
|
11
|
+
# Public surface of the graph-metadata helpers; ``GraphLike`` is re-exported
# for callers importing the protocol from this module.
__all__ = (
    "get_graph",
    "get_graph_mapping",
    "mark_dnfr_prep_dirty",
    "supports_add_edge",
    "GraphLike",
)
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
def get_graph(
    obj: GraphLike | TNFRGraph | MutableMapping[str, Any]
) -> MutableMapping[str, Any]:
    """Return the graph-level metadata mapping for ``obj``.

    ``obj`` may be a :class:`~tnfr.types.TNFRGraph`, anything fulfilling the
    :class:`~tnfr.types.GraphLike` protocol (i.e. exposing a ``graph``
    attribute, as ``networkx`` graphs and wrappers do), or an already
    extracted metadata mapping for legacy call sites.

    Raises :class:`TypeError` when no metadata mapping can be reached.
    """

    mapping = getattr(obj, "graph", None)
    if mapping is None:
        if not isinstance(obj, MutableMapping):
            raise TypeError("Unsupported graph object: metadata mapping not accessible")
        mapping = obj
    return mapping
|
|
38
|
+
|
|
39
|
+
|
|
40
|
+
def get_graph_mapping(
    G: GraphLike | TNFRGraph | MutableMapping[str, Any], key: str, warn_msg: str
) -> Mapping[str, Any] | None:
    """Return an immutable view of ``G``'s stored mapping for ``key``.

    ``G`` follows the :class:`~tnfr.types.GraphLike` protocol, is a concrete
    :class:`~tnfr.types.TNFRGraph`, or provides the metadata mapping
    directly. ``None`` is returned when the metadata has no ``get`` method,
    the key is absent, or the stored value is not a mapping (the latter also
    emits ``warn_msg`` as a :class:`UserWarning`).
    """

    metadata = get_graph(G)
    reader = getattr(metadata, "get", None)
    if reader is None:
        return None

    stored = reader(key)
    if stored is None:
        return None
    if isinstance(stored, Mapping):
        # Read-only proxy so callers cannot mutate the stored metadata.
        return MappingProxyType(stored)
    warnings.warn(warn_msg, UserWarning, stacklevel=2)
    return None
|
|
63
|
+
|
|
64
|
+
|
|
65
|
+
def mark_dnfr_prep_dirty(
    G: GraphLike | TNFRGraph | MutableMapping[str, Any]
) -> None:
    """Flag ΔNFR preparation data as stale by marking ``G``'s metadata.

    ``G`` is constrained to the :class:`~tnfr.types.GraphLike` protocol, a
    concrete :class:`~tnfr.types.TNFRGraph`, or an explicit metadata
    mapping, so the storage is guaranteed to be available for mutation.
    """

    # Consumers of the ΔNFR prep cache check this flag before reuse.
    get_graph(G)["_dnfr_prep_dirty"] = True
|
|
77
|
+
|
|
78
|
+
|
|
79
|
+
def supports_add_edge(graph: GraphLike | TNFRGraph) -> bool:
    """Report whether ``graph`` exposes an ``add_edge`` attribute.

    ``graph`` must implement :class:`~tnfr.types.GraphLike` or be a
    :class:`~tnfr.types.TNFRGraph`; this keeps runtime expectations aligned
    with the type contract enforced throughout the engine.
    """

    # Pure attribute probe — no call is attempted.
    has_method = hasattr(graph, "add_edge")
    return has_method
|