tnfr 4.5.0__py3-none-any.whl → 4.5.2__py3-none-any.whl
This diff shows the changes between publicly released versions of the package as they appear in their respective public registries; it is provided for informational purposes only.
Potentially problematic release.
This version of tnfr might be problematic.
- tnfr/__init__.py +91 -89
- tnfr/alias.py +546 -0
- tnfr/cache.py +578 -0
- tnfr/callback_utils.py +388 -0
- tnfr/cli/__init__.py +75 -0
- tnfr/cli/arguments.py +177 -0
- tnfr/cli/execution.py +288 -0
- tnfr/cli/utils.py +36 -0
- tnfr/collections_utils.py +300 -0
- tnfr/config.py +19 -28
- tnfr/constants/__init__.py +174 -0
- tnfr/constants/core.py +159 -0
- tnfr/constants/init.py +31 -0
- tnfr/constants/metric.py +110 -0
- tnfr/constants_glyphs.py +98 -0
- tnfr/dynamics/__init__.py +658 -0
- tnfr/dynamics/dnfr.py +733 -0
- tnfr/dynamics/integrators.py +267 -0
- tnfr/dynamics/sampling.py +31 -0
- tnfr/execution.py +201 -0
- tnfr/flatten.py +283 -0
- tnfr/gamma.py +302 -88
- tnfr/glyph_history.py +290 -0
- tnfr/grammar.py +285 -96
- tnfr/graph_utils.py +84 -0
- tnfr/helpers/__init__.py +71 -0
- tnfr/helpers/numeric.py +87 -0
- tnfr/immutable.py +178 -0
- tnfr/import_utils.py +228 -0
- tnfr/initialization.py +197 -0
- tnfr/io.py +246 -0
- tnfr/json_utils.py +162 -0
- tnfr/locking.py +37 -0
- tnfr/logging_utils.py +116 -0
- tnfr/metrics/__init__.py +41 -0
- tnfr/metrics/coherence.py +829 -0
- tnfr/metrics/common.py +151 -0
- tnfr/metrics/core.py +101 -0
- tnfr/metrics/diagnosis.py +234 -0
- tnfr/metrics/export.py +137 -0
- tnfr/metrics/glyph_timing.py +189 -0
- tnfr/metrics/reporting.py +148 -0
- tnfr/metrics/sense_index.py +120 -0
- tnfr/metrics/trig.py +181 -0
- tnfr/metrics/trig_cache.py +109 -0
- tnfr/node.py +214 -159
- tnfr/observers.py +126 -128
- tnfr/ontosim.py +134 -134
- tnfr/operators/__init__.py +420 -0
- tnfr/operators/jitter.py +203 -0
- tnfr/operators/remesh.py +485 -0
- tnfr/presets.py +46 -14
- tnfr/rng.py +254 -0
- tnfr/selector.py +210 -0
- tnfr/sense.py +284 -131
- tnfr/structural.py +207 -79
- tnfr/tokens.py +60 -0
- tnfr/trace.py +329 -94
- tnfr/types.py +43 -17
- tnfr/validators.py +70 -24
- tnfr/value_utils.py +59 -0
- tnfr-4.5.2.dist-info/METADATA +379 -0
- tnfr-4.5.2.dist-info/RECORD +67 -0
- tnfr/cli.py +0 -322
- tnfr/constants.py +0 -277
- tnfr/dynamics.py +0 -814
- tnfr/helpers.py +0 -264
- tnfr/main.py +0 -47
- tnfr/metrics.py +0 -597
- tnfr/operators.py +0 -525
- tnfr/program.py +0 -176
- tnfr/scenarios.py +0 -34
- tnfr-4.5.0.dist-info/METADATA +0 -109
- tnfr-4.5.0.dist-info/RECORD +0 -28
- {tnfr-4.5.0.dist-info → tnfr-4.5.2.dist-info}/WHEEL +0 -0
- {tnfr-4.5.0.dist-info → tnfr-4.5.2.dist-info}/entry_points.txt +0 -0
- {tnfr-4.5.0.dist-info → tnfr-4.5.2.dist-info}/licenses/LICENSE.md +0 -0
- {tnfr-4.5.0.dist-info → tnfr-4.5.2.dist-info}/top_level.txt +0 -0
tnfr/cli/execution.py
ADDED

@@ -0,0 +1,288 @@
from __future__ import annotations

import argparse

from pathlib import Path
from typing import Any, Optional

import networkx as nx  # type: ignore[import-untyped]

from ..constants import METRIC_DEFAULTS
from ..sense import register_sigma_callback
from ..metrics import (
    register_metrics_callbacks,
    glyph_top,
    export_metrics,
    build_metrics_summary,
)
from ..metrics.core import _metrics_step
from ..trace import register_trace
from ..execution import CANONICAL_PRESET_NAME, play, seq
from ..dynamics import (
    run,
    default_glyph_selector,
    parametric_glyph_selector,
    validate_canon,
)
from ..presets import get_preset
from ..config import apply_config
from ..io import read_structured_file, safe_write, StructuredFileError
from ..glyph_history import ensure_history
from ..ontosim import preparar_red
from ..logging_utils import get_logger
from ..types import Glyph
from ..json_utils import json_dumps
from ..flatten import parse_program_tokens

from .arguments import _args_to_dict

logger = get_logger(__name__)


# CLI summaries should remain concise by default while allowing callers to
# inspect the full glyphogram series when needed.
DEFAULT_SUMMARY_SERIES_LIMIT = 10


def _save_json(path: str, data: Any) -> None:
    payload = json_dumps(data, ensure_ascii=False, indent=2, default=list)
    safe_write(path, lambda f: f.write(payload))


def _attach_callbacks(G: "nx.Graph") -> None:
    register_sigma_callback(G)
    register_metrics_callbacks(G)
    register_trace(G)
    _metrics_step(G, ctx=None)


def _persist_history(G: "nx.Graph", args: argparse.Namespace) -> None:
    if getattr(args, "save_history", None) or getattr(args, "export_history_base", None):
        history = ensure_history(G)
        if getattr(args, "save_history", None):
            _save_json(args.save_history, history)
        if getattr(args, "export_history_base", None):
            export_metrics(G, args.export_history_base, fmt=args.export_format)


def build_basic_graph(args: argparse.Namespace) -> "nx.Graph":
    n = args.nodes
    topology = getattr(args, "topology", "ring").lower()
    seed = getattr(args, "seed", None)
    if topology == "ring":
        G = nx.cycle_graph(n)
    elif topology == "complete":
        G = nx.complete_graph(n)
    elif topology == "erdos":
        if getattr(args, "p", None) is not None:
            prob = float(args.p)
        else:
            if n <= 0:
                fallback = 0.0
            else:
                fallback = 3.0 / n
            prob = min(max(fallback, 0.0), 1.0)
        if not 0.0 <= prob <= 1.0:
            raise ValueError(f"p must be between 0 and 1; received {prob}")
        G = nx.gnp_random_graph(n, prob, seed=seed)
    else:
        raise ValueError(
            f"Invalid topology '{topology}'. Accepted options are: ring, complete, erdos"
        )
    if seed is not None:
        G.graph["RANDOM_SEED"] = int(seed)
    return G


def apply_cli_config(G: "nx.Graph", args: argparse.Namespace) -> None:
    if args.config:
        apply_config(G, Path(args.config))
    arg_map = {
        "dt": ("DT", float),
        "integrator": ("INTEGRATOR_METHOD", str),
        "remesh_mode": ("REMESH_MODE", str),
        "glyph_hysteresis_window": ("GLYPH_HYSTERESIS_WINDOW", int),
    }
    for attr, (key, conv) in arg_map.items():
        val = getattr(args, attr, None)
        if val is not None:
            G.graph[key] = conv(val)

    gcanon = {
        **METRIC_DEFAULTS["GRAMMAR_CANON"],
        **_args_to_dict(args, prefix="grammar_"),
    }
    if getattr(args, "grammar_canon", None) is not None:
        gcanon["enabled"] = bool(args.grammar_canon)
    G.graph["GRAMMAR_CANON"] = gcanon

    selector = getattr(args, "selector", None)
    if selector is not None:
        sel_map = {
            "basic": default_glyph_selector,
            "param": parametric_glyph_selector,
        }
        G.graph["glyph_selector"] = sel_map.get(
            selector, default_glyph_selector
        )

    if hasattr(args, "gamma_type"):
        G.graph["GAMMA"] = {
            "type": args.gamma_type,
            "beta": args.gamma_beta,
            "R0": args.gamma_R0,
        }


def register_callbacks_and_observer(G: "nx.Graph") -> None:
    _attach_callbacks(G)
    validate_canon(G)


def _build_graph_from_args(args: argparse.Namespace) -> "nx.Graph":
    G = build_basic_graph(args)
    apply_cli_config(G, args)
    if getattr(args, "observer", False):
        G.graph["ATTACH_STD_OBSERVER"] = True
    preparar_red(G)
    register_callbacks_and_observer(G)
    return G


def _load_sequence(path: Path) -> list[Any]:
    try:
        data = read_structured_file(path)
    except (StructuredFileError, OSError) as exc:
        if isinstance(exc, StructuredFileError):
            message = str(exc)
        else:
            message = str(StructuredFileError(path, exc))
        logger.error("%s", message)
        raise SystemExit(1) from exc
    return parse_program_tokens(data)


def resolve_program(
    args: argparse.Namespace, default: Optional[Any] = None
) -> Optional[Any]:
    if getattr(args, "preset", None):
        try:
            return get_preset(args.preset)
        except KeyError as exc:
            logger.error(
                "Preset desconocido '%s'. Usa --sequence-file para cargar secuencias personalizadas",
                args.preset,
            )
            raise SystemExit(1) from exc
    if getattr(args, "sequence_file", None):
        return _load_sequence(Path(args.sequence_file))
    return default


def run_program(
    G: Optional["nx.Graph"], program: Optional[Any], args: argparse.Namespace
) -> "nx.Graph":
    if G is None:
        G = _build_graph_from_args(args)

    if program is None:
        steps = getattr(args, "steps", 100)
        steps = 100 if steps is None else int(steps)
        if steps < 0:
            steps = 0

        run_kwargs: dict[str, Any] = {}
        for attr in ("dt", "use_Si", "apply_glyphs"):
            value = getattr(args, attr, None)
            if value is not None:
                run_kwargs[attr] = value

        run(G, steps=steps, **run_kwargs)
    else:
        play(G, program)

    _persist_history(G, args)
    return G


def _run_cli_program(
    args: argparse.Namespace,
    *,
    default_program: Optional[Any] = None,
    graph: Optional["nx.Graph"] = None,
) -> tuple[int, Optional["nx.Graph"]]:
    try:
        program = resolve_program(args, default=default_program)
    except SystemExit as exc:
        code = exc.code if isinstance(exc.code, int) else 1
        return code or 1, None

    result_graph = run_program(graph, program, args)
    return 0, result_graph


def _log_run_summaries(G: "nx.Graph", args: argparse.Namespace) -> None:
    cfg_coh = G.graph.get("COHERENCE", METRIC_DEFAULTS["COHERENCE"])
    cfg_diag = G.graph.get("DIAGNOSIS", METRIC_DEFAULTS["DIAGNOSIS"])
    hist = ensure_history(G)

    if cfg_coh.get("enabled", True):
        Wstats = hist.get(cfg_coh.get("stats_history_key", "W_stats"), [])
        if Wstats:
            logger.info("[COHERENCE] último paso: %s", Wstats[-1])

    if cfg_diag.get("enabled", True):
        last_diag = hist.get(cfg_diag.get("history_key", "nodal_diag"), [])
        if last_diag:
            sample = list(last_diag[-1].values())[:3]
            logger.info("[DIAGNOSIS] ejemplo: %s", sample)

    if args.summary:
        summary_limit = getattr(args, "summary_limit", DEFAULT_SUMMARY_SERIES_LIMIT)
        summary, has_latency_values = build_metrics_summary(
            G, series_limit=summary_limit
        )
        logger.info("Tg global: %s", summary["Tg_global"])
        logger.info("Top operadores por Tg: %s", glyph_top(G, k=5))
        if has_latency_values:
            logger.info("Latencia media: %s", summary["latency_mean"])


def cmd_run(args: argparse.Namespace) -> int:
    code, graph = _run_cli_program(args)
    if code != 0:
        return code

    if graph is not None:
        _log_run_summaries(graph, args)
    return 0


def cmd_sequence(args: argparse.Namespace) -> int:
    if args.preset and args.sequence_file:
        logger.error(
            "No se puede usar --preset y --sequence-file al mismo tiempo"
        )
        return 1
    code, _ = _run_cli_program(
        args, default_program=get_preset(CANONICAL_PRESET_NAME)
    )
    return code


def cmd_metrics(args: argparse.Namespace) -> int:
    if getattr(args, "steps", None) is None:
        # Default a longer run for metrics stability
        args.steps = 200

    code, graph = _run_cli_program(args)
    if code != 0 or graph is None:
        return code

    summary_limit = getattr(args, "summary_limit", None)
    out, _ = build_metrics_summary(graph, series_limit=summary_limit)
    if args.save:
        _save_json(args.save, out)
    else:
        logger.info("%s", json_dumps(out))
    return 0
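For orientation, a minimal sketch (not part of the package) of driving the topology builder outside the CLI; it assumes the module is importable as tnfr.cli.execution, per the file list above, and that the Namespace carries only the attributes the function actually reads:

import argparse
from tnfr.cli.execution import build_basic_graph

# Only the attributes read via args/getattr in build_basic_graph need to exist.
args = argparse.Namespace(nodes=12, topology="erdos", p=None, seed=42)
G = build_basic_graph(args)  # Erdos-Renyi graph; p falls back to 3/n = 0.25 when --p is omitted
print(G.number_of_nodes(), G.graph.get("RANDOM_SEED"))  # 12 42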
tnfr/cli/utils.py
ADDED

@@ -0,0 +1,36 @@
"""Utilities for CLI modules."""

from __future__ import annotations

from typing import Any


def spec(opt: str, /, **kwargs: Any) -> tuple[str, dict[str, Any]]:
    """Create an argument specification pair.

    Parameters
    ----------
    opt:
        Option string to register, e.g. ``"--foo"``.
    **kwargs:
        Keyword arguments forwarded to
        :meth:`argparse.ArgumentParser.add_argument`.

    Returns
    -------
    tuple[str, dict[str, Any]]
        A pair suitable for collecting into argument specification sequences.
        If ``dest`` is not provided it is derived from ``opt`` by stripping
        leading dashes and replacing dots and hyphens with underscores.
        ``default`` defaults to ``None`` so missing options can be filtered
        easily.
    """

    kwargs = dict(kwargs)
    kwargs.setdefault(
        "dest", opt.lstrip("-").replace("-", "_").replace(".", "_")
    )
    kwargs.setdefault("default", None)
    return opt, kwargs
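As a usage sketch (assumed, not part of the diff), the pairs returned by spec are meant to be unpacked into argparse.ArgumentParser.add_argument:

import argparse
from tnfr.cli.utils import spec

specs = [
    spec("--summary-limit", type=int),       # dest derived as "summary_limit", default None
    spec("--export-format", default="csv"),  # an explicit default is preserved
]
parser = argparse.ArgumentParser()
for opt, kwargs in specs:
    parser.add_argument(opt, **kwargs)
print(parser.parse_args([]))  # Namespace(summary_limit=None, export_format='csv')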
tnfr/collections_utils.py
ADDED

@@ -0,0 +1,300 @@
"""Utilities for working with generic collections and weight mappings."""

from __future__ import annotations

import logging
from collections import deque
from collections.abc import Collection, Iterable, Mapping, Sequence
from itertools import islice
from typing import Any, Callable, Iterator, TypeVar, cast

from .logging_utils import get_logger
from .logging_utils import warn_once as _warn_once_factory
from .value_utils import convert_value
from .helpers.numeric import kahan_sum_nd

T = TypeVar("T")

logger = get_logger(__name__)

STRING_TYPES = (str, bytes, bytearray)

NEGATIVE_WEIGHTS_MSG = "Negative weights detected: %s"

_NEGATIVE_WARN_ONCE_MAXSIZE = 1024


def negative_weights_warn_once(
    *, maxsize: int = _NEGATIVE_WARN_ONCE_MAXSIZE
) -> Callable[[Mapping[str, float]], None]:
    """Return a ``WarnOnce`` callable for negative weight warnings.

    The returned callable may be reused across multiple
    :func:`normalize_weights` invocations to suppress duplicate warnings for
    the same keys.
    """

    return _warn_once_factory(logger, NEGATIVE_WEIGHTS_MSG, maxsize=maxsize)


def _log_negative_weights(negatives: Mapping[str, float]) -> None:
    """Log negative weight warnings without deduplicating keys."""

    logger.warning(NEGATIVE_WEIGHTS_MSG, negatives)


def _resolve_negative_warn_handler(
    warn_once: bool | Callable[[Mapping[str, float]], None]
) -> Callable[[Mapping[str, float]], None]:
    """Return a callable that logs negative weight warnings."""

    if callable(warn_once):
        return warn_once
    if warn_once:
        return negative_weights_warn_once()
    return _log_negative_weights


def is_non_string_sequence(obj: Any) -> bool:
    """Return ``True`` if ``obj`` is an ``Iterable`` but not string-like or a mapping."""
    return isinstance(obj, Iterable) and not isinstance(obj, (*STRING_TYPES, Mapping))


def flatten_structure(
    obj: Any,
    *,
    expand: Callable[[Any], Iterable[Any] | None] | None = None,
) -> Iterator[Any]:
    """Yield leaf items from ``obj``.

    The order of yielded items follows the order of the input iterable when it
    is defined. For unordered iterables like :class:`set` the resulting order is
    arbitrary. Mappings are treated as atomic items and not expanded.

    Parameters
    ----------
    obj:
        Object that may contain nested iterables.
    expand:
        Optional callable returning a replacement iterable for ``item``. When
        it returns ``None`` the ``item`` is processed normally.
    """

    stack = deque([obj])
    seen: set[int] = set()
    while stack:
        item = stack.pop()
        item_id = id(item)
        if item_id in seen:
            continue
        if expand is not None:
            replacement = expand(item)
            if replacement is not None:
                seen.add(item_id)
                stack.extendleft(replacement)
                continue
        if is_non_string_sequence(item):
            seen.add(item_id)
            stack.extendleft(item)
        else:
            yield item

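For example (a sketch, assuming this hunk is tnfr/collections_utils.py as the file list above indicates), nested sequences are reduced to their leaves while strings and mappings stay atomic:

from tnfr.collections_utils import flatten_structure

print(list(flatten_structure([1, 2, 3])))               # [1, 2, 3]
print(list(flatten_structure(["ab", [1, [2, 3]], 4])))  # ['ab', 4, 1, 2, 3]
# Leaves of nested branches may interleave because the traversal uses a deque-based stack.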

__all__ = (
    "MAX_MATERIALIZE_DEFAULT",
    "normalize_materialize_limit",
    "is_non_string_sequence",
    "flatten_structure",
    "STRING_TYPES",
    "ensure_collection",
    "normalize_weights",
    "negative_weights_warn_once",
    "normalize_counter",
    "mix_groups",
)

MAX_MATERIALIZE_DEFAULT: int = 1000
"""Default materialization limit used by :func:`ensure_collection`.

This guard prevents accidentally consuming huge or infinite iterables when a
limit is not explicitly provided. Pass ``max_materialize=None`` to disable the
limit.
"""


def normalize_materialize_limit(max_materialize: int | None) -> int | None:
    """Normalize and validate ``max_materialize`` returning a usable limit."""
    if max_materialize is None:
        return None
    limit = int(max_materialize)
    if limit < 0:
        raise ValueError("'max_materialize' must be non-negative")
    return limit


def ensure_collection(
    it: Iterable[T],
    *,
    max_materialize: int | None = MAX_MATERIALIZE_DEFAULT,
    error_msg: str | None = None,
) -> Collection[T]:
    """Return ``it`` as a :class:`Collection`, materializing when needed.

    Checks are executed in the following order:

    1. Existing collections are returned directly. String-like inputs
       (``str``, ``bytes`` and ``bytearray``) are wrapped as a single item
       tuple.
    2. The object must be an :class:`Iterable`; otherwise ``TypeError`` is
       raised.
    3. Remaining iterables are materialized up to ``max_materialize`` items.
       ``None`` disables the limit. ``error_msg`` customizes the
       :class:`ValueError` raised when the iterable yields more items than
       allowed. The input is consumed at most once and no extra items beyond
       the limit are stored in memory.
    """

    # Step 1: early-return for collections and raw strings/bytes
    if isinstance(it, Collection):
        if isinstance(it, STRING_TYPES):
            return (cast(T, it),)
        else:
            return it

    # Step 2: ensure the input is iterable
    if not isinstance(it, Iterable):
        raise TypeError(f"{it!r} is not iterable")

    # Step 3: validate limit and materialize items once
    limit = normalize_materialize_limit(max_materialize)
    if limit is None:
        return tuple(it)
    if limit == 0:
        return ()

    items = tuple(islice(it, limit + 1))
    if len(items) > limit:
        examples = ", ".join(repr(x) for x in items[:3])
        msg = error_msg or (
            f"Iterable produced {len(items)} items, exceeds limit {limit}; first items: [{examples}]"
        )
        raise ValueError(msg)
    return items

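A short sketch of the three branches described in the docstring (imports assumed from tnfr.collections_utils):

from itertools import count
from tnfr.collections_utils import ensure_collection

print(ensure_collection([1, 2, 3]))        # returned as-is: already a Collection
print(ensure_collection("abc"))            # ('abc',): strings are wrapped, not iterated
print(ensure_collection(iter(range(5))))   # (0, 1, 2, 3, 4): materialized once
try:
    ensure_collection(count(), max_materialize=10)
except ValueError as exc:
    print(exc)                             # the limit guards against unbounded iterables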

def _convert_and_validate_weights(
    dict_like: Mapping[str, Any],
    keys: Iterable[str] | Sequence[str],
    default: float,
    *,
    error_on_conversion: bool,
    error_on_negative: bool,
    warn_once: bool | Callable[[Mapping[str, float]], None],
) -> tuple[dict[str, float], list[str], float]:
    """Return converted weights, deduplicated keys and the accumulated total."""

    keys_list = list(dict.fromkeys(keys))
    default_float = float(default)

    def convert(k: str) -> float:
        ok, val = convert_value(
            dict_like.get(k, default_float),
            float,
            strict=error_on_conversion,
            key=k,
            log_level=logging.WARNING,
        )
        return cast(float, val) if ok else default_float

    weights = {k: convert(k) for k in keys_list}
    negatives = {k: w for k, w in weights.items() if w < 0}
    total = kahan_sum_nd(((w,) for w in weights.values()), dims=1)[0]

    if negatives:
        if error_on_negative:
            raise ValueError(NEGATIVE_WEIGHTS_MSG % negatives)
        warn_negative = _resolve_negative_warn_handler(warn_once)
        warn_negative(negatives)
        for key, weight in negatives.items():
            weights[key] = 0.0
            total -= weight

    return weights, keys_list, total


def normalize_weights(
    dict_like: Mapping[str, Any],
    keys: Iterable[str] | Sequence[str],
    default: float = 0.0,
    *,
    error_on_negative: bool = False,
    warn_once: bool | Callable[[Mapping[str, float]], None] = True,
    error_on_conversion: bool = False,
) -> dict[str, float]:
    """Normalize ``keys`` in mapping ``dict_like`` so their sum is 1.

    ``keys`` may be any iterable of strings and is materialized once while
    collapsing repeated entries preserving their first occurrence.

    Negative weights are handled according to ``error_on_negative``. When
    ``True`` a :class:`ValueError` is raised. Otherwise negatives are logged,
    replaced with ``0`` and the remaining weights are renormalized. If all
    weights are non-positive a uniform distribution is returned.

    Conversion errors are controlled separately by ``error_on_conversion``.
    When ``True`` any :class:`TypeError` or :class:`ValueError` while
    converting a value to ``float`` is propagated. Otherwise the error is
    logged and the ``default`` value is used.

    ``warn_once`` accepts either a boolean or a callable. ``False`` logs all
    negative weights using :func:`logging.Logger.warning`. ``True`` (the
    default) creates a fresh :class:`~tnfr.logging_utils.WarnOnce` instance for
    the call, emitting a single warning containing all negative keys. To reuse
    deduplication state across calls, pass a callable such as
    :func:`negative_weights_warn_once`.
    """
    weights, keys_list, total = _convert_and_validate_weights(
        dict_like,
        keys,
        default,
        error_on_conversion=error_on_conversion,
        error_on_negative=error_on_negative,
        warn_once=warn_once,
    )
    if not keys_list:
        return {}
    if total <= 0:
        uniform = 1.0 / len(keys_list)
        return {k: uniform for k in keys_list}
    return {k: w / total for k, w in weights.items()}

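Two representative calls (a sketch under the same import assumption):

from tnfr.collections_utils import normalize_weights

print(normalize_weights({"a": 2.0, "b": 1.0, "c": -1.0}, ["a", "b", "c"]))
# {'a': 0.666..., 'b': 0.333..., 'c': 0.0}: the negative weight is zeroed, logged and excluded from the total
print(normalize_weights({}, ["x", "y"]))
# {'x': 0.5, 'y': 0.5}: uniform fallback when all weights are non-positive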

def normalize_counter(
    counts: Mapping[str, float | int],
) -> tuple[dict[str, float], float]:
    """Normalize a ``Counter`` returning proportions and total."""
    total = kahan_sum_nd(((c,) for c in counts.values()), dims=1)[0]
    if total <= 0:
        return {}, 0
    dist = {k: v / total for k, v in counts.items() if v}
    return dist, total


def mix_groups(
    dist: Mapping[str, float],
    groups: Mapping[str, Iterable[str]],
    *,
    prefix: str = "_",
) -> dict[str, float]:
    """Aggregate values of ``dist`` according to ``groups``."""
    out: dict[str, float] = dict(dist)
    out.update(
        {
            f"{prefix}{label}": kahan_sum_nd(
                ((dist.get(k, 0.0),) for k in keys),
                dims=1,
            )[0]
            for label, keys in groups.items()
        }
    )
    return out
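A small sketch tying the two helpers together (keys and group labels are illustrative only):

from collections import Counter
from tnfr.collections_utils import mix_groups, normalize_counter

dist, total = normalize_counter(Counter({"a": 3, "b": 1}))
print(dist, total)                           # {'a': 0.75, 'b': 0.25} 4.0
print(mix_groups(dist, {"ab": ["a", "b"]}))
# {'a': 0.75, 'b': 0.25, '_ab': 1.0}: group sums are added under the prefixed label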
tnfr/config.py
CHANGED

@@ -1,41 +1,32 @@
-"""
-
-Permite definir parámetros en JSON o YAML y aplicarlos sobre ``G.graph``
-reutilizando :func:`tnfr.constants.inject_defaults`.
-"""
+"""Configuration utilities."""
 
 from __future__ import annotations
-from typing import Any,
-import
-
-
-    import yaml  # type: ignore
-except Exception:  # pragma: no cover
-    yaml = None
+from typing import Any, TYPE_CHECKING
+from collections.abc import Mapping
+from pathlib import Path
+from .io import read_structured_file
 
 from .constants import inject_defaults
 
+if TYPE_CHECKING:  # pragma: no cover - only for type checkers
+    import networkx as nx  # type: ignore[import-untyped]
+
+__all__ = ("load_config", "apply_config")
+
 
-def load_config(path: str) ->
-    """
-
-
-    if
-
-        raise RuntimeError("pyyaml no está instalado")
-        data = yaml.safe_load(text)
-    else:
-        data = json.loads(text)
-    if not isinstance(data, dict):
-        raise ValueError("El archivo de configuración debe contener un objeto")
+def load_config(path: str | Path) -> Mapping[str, Any]:
+    """Read a JSON/YAML file and return a mapping with parameters."""
+    path_obj = path if isinstance(path, Path) else Path(path)
+    data = read_structured_file(path_obj)
+    if not isinstance(data, Mapping):
+        raise ValueError("Configuration file must contain an object")
     return data
 
 
-def apply_config(G, path: str) -> None:
-    """
+def apply_config(G: nx.Graph, path: str | Path) -> None:
+    """Inject parameters from ``path`` into ``G.graph``.
 
-
-    *defaults* canónicos.
+    Reuses :func:`inject_defaults` to keep canonical default semantics.
     """
     cfg = load_config(path)
     inject_defaults(G, cfg, override=True)