tnfr-4.5.1-py3-none-any.whl → tnfr-4.5.2-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of tnfr might be problematic.

Files changed (78)
  1. tnfr/__init__.py +91 -90
  2. tnfr/alias.py +546 -0
  3. tnfr/cache.py +578 -0
  4. tnfr/callback_utils.py +388 -0
  5. tnfr/cli/__init__.py +75 -0
  6. tnfr/cli/arguments.py +177 -0
  7. tnfr/cli/execution.py +288 -0
  8. tnfr/cli/utils.py +36 -0
  9. tnfr/collections_utils.py +300 -0
  10. tnfr/config.py +19 -28
  11. tnfr/constants/__init__.py +174 -0
  12. tnfr/constants/core.py +159 -0
  13. tnfr/constants/init.py +31 -0
  14. tnfr/constants/metric.py +110 -0
  15. tnfr/constants_glyphs.py +98 -0
  16. tnfr/dynamics/__init__.py +658 -0
  17. tnfr/dynamics/dnfr.py +733 -0
  18. tnfr/dynamics/integrators.py +267 -0
  19. tnfr/dynamics/sampling.py +31 -0
  20. tnfr/execution.py +201 -0
  21. tnfr/flatten.py +283 -0
  22. tnfr/gamma.py +302 -88
  23. tnfr/glyph_history.py +290 -0
  24. tnfr/grammar.py +285 -96
  25. tnfr/graph_utils.py +84 -0
  26. tnfr/helpers/__init__.py +71 -0
  27. tnfr/helpers/numeric.py +87 -0
  28. tnfr/immutable.py +178 -0
  29. tnfr/import_utils.py +228 -0
  30. tnfr/initialization.py +197 -0
  31. tnfr/io.py +246 -0
  32. tnfr/json_utils.py +162 -0
  33. tnfr/locking.py +37 -0
  34. tnfr/logging_utils.py +116 -0
  35. tnfr/metrics/__init__.py +41 -0
  36. tnfr/metrics/coherence.py +829 -0
  37. tnfr/metrics/common.py +151 -0
  38. tnfr/metrics/core.py +101 -0
  39. tnfr/metrics/diagnosis.py +234 -0
  40. tnfr/metrics/export.py +137 -0
  41. tnfr/metrics/glyph_timing.py +189 -0
  42. tnfr/metrics/reporting.py +148 -0
  43. tnfr/metrics/sense_index.py +120 -0
  44. tnfr/metrics/trig.py +181 -0
  45. tnfr/metrics/trig_cache.py +109 -0
  46. tnfr/node.py +214 -159
  47. tnfr/observers.py +126 -136
  48. tnfr/ontosim.py +134 -134
  49. tnfr/operators/__init__.py +420 -0
  50. tnfr/operators/jitter.py +203 -0
  51. tnfr/operators/remesh.py +485 -0
  52. tnfr/presets.py +46 -14
  53. tnfr/rng.py +254 -0
  54. tnfr/selector.py +210 -0
  55. tnfr/sense.py +284 -131
  56. tnfr/structural.py +207 -79
  57. tnfr/tokens.py +60 -0
  58. tnfr/trace.py +329 -94
  59. tnfr/types.py +43 -17
  60. tnfr/validators.py +70 -24
  61. tnfr/value_utils.py +59 -0
  62. tnfr-4.5.2.dist-info/METADATA +379 -0
  63. tnfr-4.5.2.dist-info/RECORD +67 -0
  64. tnfr/cli.py +0 -322
  65. tnfr/constants.py +0 -277
  66. tnfr/dynamics.py +0 -814
  67. tnfr/helpers.py +0 -264
  68. tnfr/main.py +0 -47
  69. tnfr/metrics.py +0 -597
  70. tnfr/operators.py +0 -525
  71. tnfr/program.py +0 -176
  72. tnfr/scenarios.py +0 -34
  73. tnfr-4.5.1.dist-info/METADATA +0 -221
  74. tnfr-4.5.1.dist-info/RECORD +0 -28
  75. {tnfr-4.5.1.dist-info → tnfr-4.5.2.dist-info}/WHEEL +0 -0
  76. {tnfr-4.5.1.dist-info → tnfr-4.5.2.dist-info}/entry_points.txt +0 -0
  77. {tnfr-4.5.1.dist-info → tnfr-4.5.2.dist-info}/licenses/LICENSE.md +0 -0
  78. {tnfr-4.5.1.dist-info → tnfr-4.5.2.dist-info}/top_level.txt +0 -0
tnfr/immutable.py ADDED
@@ -0,0 +1,178 @@
+"""Utilities for freezing objects and checking immutability.
+
+Handlers registered via :func:`functools.singledispatch` live in this module
+and are triggered indirectly by the dispatcher when matching types are
+encountered.
+"""
+
+from __future__ import annotations
+
+from contextlib import contextmanager
+from dataclasses import asdict, is_dataclass
+from functools import lru_cache, singledispatch, wraps, partial
+from typing import Any, Callable
+from collections.abc import Mapping
+from types import MappingProxyType
+import threading
+import weakref
+
+# Types considered immutable without further inspection
+IMMUTABLE_SIMPLE = frozenset(
+    {int, float, complex, str, bool, bytes, type(None)}
+)
+
+
+@contextmanager
+def _cycle_guard(value: Any, seen: set[int] | None = None):
+    """Context manager that detects reference cycles during freezing."""
+    if seen is None:
+        seen = set()
+    obj_id = id(value)
+    if obj_id in seen:
+        raise ValueError("cycle detected")
+    seen.add(obj_id)
+    try:
+        yield seen
+    finally:
+        seen.remove(obj_id)
+
+
+def _check_cycle(func: Callable[[Any, set[int] | None], Any]):
+    """Decorator applying :func:`_cycle_guard` to ``func``."""
+
+    @wraps(func)
+    def wrapper(value: Any, seen: set[int] | None = None):
+        with _cycle_guard(value, seen) as seen:
+            return func(value, seen)
+
+    return wrapper
+
+
+def _freeze_dataclass(value: Any, seen: set[int]):
+    params = getattr(type(value), "__dataclass_params__", None)
+    frozen = bool(params and params.frozen)
+    data = asdict(value)
+    tag = "mapping" if frozen else "dict"
+    return (tag, tuple((k, _freeze(v, seen)) for k, v in data.items()))
+
+
+@singledispatch
+@_check_cycle
+def _freeze(value: Any, seen: set[int] | None = None):
+    """Recursively convert ``value`` into an immutable representation."""
+    if is_dataclass(value) and not isinstance(value, type):
+        return _freeze_dataclass(value, seen)
+    if type(value) in IMMUTABLE_SIMPLE:
+        return value
+    raise TypeError
+
+
+@_freeze.register(tuple)
+@_check_cycle
+def _freeze_tuple(value: tuple, seen: set[int] | None = None):  # noqa: F401
+    return tuple(_freeze(v, seen) for v in value)
+
+
+def _freeze_iterable(container: Any, tag: str, seen: set[int] | None) -> tuple[str, tuple]:
+    return (tag, tuple(_freeze(v, seen) for v in container))
+
+
+def _freeze_iterable_with_tag(
+    value: Any, seen: set[int] | None = None, *, tag: str
+) -> tuple[str, tuple]:
+    return _freeze_iterable(value, tag, seen)
+
+
+def _register_iterable(cls: type, tag: str) -> None:
+    _freeze.register(cls)(_check_cycle(partial(_freeze_iterable_with_tag, tag=tag)))
+
+
+for _cls, _tag in (
+    (list, "list"),
+    (set, "set"),
+    (frozenset, "frozenset"),
+    (bytearray, "bytearray"),
+):
+    _register_iterable(_cls, _tag)
+
+
+@_freeze.register(Mapping)
+@_check_cycle
+def _freeze_mapping(value: Mapping, seen: set[int] | None = None):  # noqa: F401
+    tag = "dict" if hasattr(value, "__setitem__") else "mapping"
+    return (tag, tuple((k, _freeze(v, seen)) for k, v in value.items()))
+
+
+def _all_immutable(iterable) -> bool:
+    return all(_is_immutable_inner(v) for v in iterable)
+
+
+# Dispatch table kept immutable to avoid accidental mutation.
+_IMMUTABLE_TAG_DISPATCH: Mapping[str, Callable[[tuple], bool]] = MappingProxyType(
+    {
+        "mapping": lambda v: _all_immutable(v[1]),
+        "frozenset": lambda v: _all_immutable(v[1]),
+        "list": lambda v: False,
+        "set": lambda v: False,
+        "bytearray": lambda v: False,
+        "dict": lambda v: False,
+    }
+)
+
+
+@lru_cache(maxsize=1024)
+@singledispatch
+def _is_immutable_inner(value: Any) -> bool:
+    return type(value) in IMMUTABLE_SIMPLE
+
+
+@_is_immutable_inner.register(tuple)
+def _is_immutable_inner_tuple(value: tuple) -> bool:  # noqa: F401
+    if value and isinstance(value[0], str):
+        handler = _IMMUTABLE_TAG_DISPATCH.get(value[0])
+        if handler is not None:
+            return handler(value)
+    return _all_immutable(value)
+
+
+@_is_immutable_inner.register(frozenset)
+def _is_immutable_inner_frozenset(value: frozenset) -> bool:  # noqa: F401
+    return _all_immutable(value)
+
+
+_IMMUTABLE_CACHE: weakref.WeakKeyDictionary[Any, bool] = (
+    weakref.WeakKeyDictionary()
+)
+_IMMUTABLE_CACHE_LOCK = threading.Lock()
+
+
+def _is_immutable(value: Any) -> bool:
+    """Check recursively if ``value`` is immutable with caching."""
+    with _IMMUTABLE_CACHE_LOCK:
+        try:
+            return _IMMUTABLE_CACHE[value]
+        except (KeyError, TypeError):
+            pass
+
+    try:
+        frozen = _freeze(value)
+    except (TypeError, ValueError):
+        result = False
+    else:
+        result = _is_immutable_inner(frozen)
+
+    with _IMMUTABLE_CACHE_LOCK:
+        try:
+            _IMMUTABLE_CACHE[value] = result
+        except TypeError:
+            pass
+
+    return result
+
+
+__all__ = (
+    "_freeze",
+    "_is_immutable",
+    "_is_immutable_inner",
+    "_IMMUTABLE_CACHE",
+)
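For orientation, here is a minimal usage sketch of the freezing helpers added above. It is illustrative only and inferred from the signatures in this diff; the leading underscores mark these as internal helpers of tnfr.immutable, even though they are re-exported via __all__.

from dataclasses import dataclass

from tnfr.immutable import _freeze, _is_immutable


@dataclass(frozen=True)
class Point:
    x: int
    y: int


# _freeze returns a hashable, tag-based snapshot of the value; per the diff,
# a frozen dataclass is tagged "mapping", a plain dict "dict", a list "list".
snapshot = _freeze({"origin": Point(0, 0), "ids": (1, 2, 3)})
print(snapshot[0])  # "dict"

# _is_immutable answers recursively, caching per object where weak references allow.
print(_is_immutable((1, "a", frozenset({2}))))  # True
print(_is_immutable([1, 2, 3]))                 # False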
tnfr/import_utils.py ADDED
@@ -0,0 +1,228 @@
+"""Helpers for optional imports and cached access to heavy modules.
+
+This module centralises caching for optional dependencies. It exposes
+:func:`cached_import`, backed by a small :func:`functools.lru_cache`, alongside a
+light-weight registry that tracks failed imports and warnings. Use
+:func:`prune_failed_imports` or ``cached_import.cache_clear`` to reset state when
+new packages become available at runtime.
+"""
+
+from __future__ import annotations
+
+import importlib
+import warnings
+from collections import OrderedDict
+from dataclasses import dataclass, field
+from functools import lru_cache
+from typing import Any, Callable, Literal
+import threading
+
+from .logging_utils import get_logger
+
+__all__ = (
+    "cached_import",
+    "get_numpy",
+    "get_nodonx",
+    "prune_failed_imports",
+    "IMPORT_LOG",
+)
+
+
+logger = get_logger(__name__)
+
+
+def _emit(message: str, mode: Literal["warn", "log", "both"]) -> None:
+    """Emit ``message`` via :mod:`warnings`, logger or both."""
+
+    if mode in ("warn", "both"):
+        warnings.warn(message, RuntimeWarning, stacklevel=2)
+    if mode in ("log", "both"):
+        logger.warning(message)
+
+
+EMIT_MAP: dict[str, Callable[[str], None]] = {
+    "warn": lambda msg: _emit(msg, "warn"),
+    "log": lambda msg: _emit(msg, "log"),
+    "both": lambda msg: _emit(msg, "both"),
+}
+
+
+def _format_failure_message(module: str, attr: str | None, err: Exception) -> str:
+    """Return a standardised failure message."""
+
+    return (
+        f"Failed to import module '{module}': {err}"
+        if isinstance(err, ImportError)
+        else f"Module '{module}' has no attribute '{attr}': {err}"
+    )
+
+
+_FAILED_IMPORT_LIMIT = 128
+_DEFAULT_CACHE_SIZE = 128
+
+
+@dataclass(slots=True)
+class ImportRegistry:
+    """Process-wide registry tracking failed imports and emitted warnings."""
+
+    limit: int = _FAILED_IMPORT_LIMIT
+    failed: OrderedDict[str, None] = field(default_factory=OrderedDict)
+    warned: set[str] = field(default_factory=set)
+    lock: threading.Lock = field(default_factory=threading.Lock)
+
+    def _insert(self, key: str) -> None:
+        self.failed[key] = None
+        self.failed.move_to_end(key)
+        while len(self.failed) > self.limit:
+            self.failed.popitem(last=False)
+
+    def record_failure(self, key: str, *, module: str | None = None) -> None:
+        """Record ``key`` and, optionally, ``module`` as failed imports."""
+
+        with self.lock:
+            self._insert(key)
+            if module and module != key:
+                self._insert(module)
+
+    def discard(self, key: str) -> None:
+        """Remove ``key`` from the registry and clear its warning state."""
+
+        with self.lock:
+            self.failed.pop(key, None)
+            self.warned.discard(key)
+
+    def mark_warning(self, module: str) -> bool:
+        """Mark ``module`` as warned and return ``True`` if it was new."""
+
+        with self.lock:
+            if module in self.warned:
+                return False
+            self.warned.add(module)
+            return True
+
+    def clear(self) -> None:
+        """Remove all failure records and warning markers."""
+
+        with self.lock:
+            self.failed.clear()
+            self.warned.clear()
+
+    def __contains__(self, key: str) -> bool:  # pragma: no cover - trivial
+        with self.lock:
+            return key in self.failed
+
+
+_IMPORT_STATE = ImportRegistry()
+# Public alias to ease direct introspection in tests and diagnostics.
+IMPORT_LOG = _IMPORT_STATE
+
+
+@lru_cache(maxsize=_DEFAULT_CACHE_SIZE)
+def _import_cached(module_name: str, attr: str | None) -> tuple[bool, Any]:
+    """Import ``module_name`` (and optional ``attr``) capturing failures."""
+
+    try:
+        module = importlib.import_module(module_name)
+        obj = getattr(module, attr) if attr else module
+    except (ImportError, AttributeError) as exc:
+        return False, exc
+    return True, obj
+
+
+def _warn_failure(
+    module: str,
+    attr: str | None,
+    err: Exception,
+    *,
+    emit: Literal["warn", "log", "both"] = "warn",
+) -> None:
+    """Emit a warning about a failed import."""
+
+    msg = _format_failure_message(module, attr, err)
+    if _IMPORT_STATE.mark_warning(module):
+        EMIT_MAP[emit](msg)
+    else:
+        logger.debug(msg)
+
+
+def cached_import(
+    module_name: str,
+    attr: str | None = None,
+    *,
+    fallback: Any | None = None,
+    emit: Literal["warn", "log", "both"] = "warn",
+) -> Any | None:
+    """Import ``module_name`` (and optional ``attr``) with caching and fallback.
+
+    Parameters
+    ----------
+    module_name:
+        Module to import.
+    attr:
+        Optional attribute to fetch from the module.
+    fallback:
+        Value returned when the import fails.
+    emit:
+        Destination for warnings emitted on failure (``"warn"``/``"log"``/``"both"``).
+    """
+
+    key = module_name if attr is None else f"{module_name}.{attr}"
+    success, result = _import_cached(module_name, attr)
+    if success:
+        _IMPORT_STATE.discard(key)
+        if attr is not None:
+            _IMPORT_STATE.discard(module_name)
+        return result
+    exc = result
+    include_module = isinstance(exc, ImportError)
+    _warn_failure(module_name, attr, exc, emit=emit)
+    _IMPORT_STATE.record_failure(key, module=module_name if include_module else None)
+    return fallback
+
+
+def _clear_default_cache() -> None:
+    global _NP_MISSING_LOGGED
+
+    _import_cached.cache_clear()
+    _NP_MISSING_LOGGED = False
+
+
+cached_import.cache_clear = _clear_default_cache  # type: ignore[attr-defined]
+
+
+_NP_MISSING_LOGGED = False
+
+
+def get_numpy() -> Any | None:
+    """Return the cached :mod:`numpy` module when available.
+
+    Import attempts are delegated to :func:`cached_import`, which already caches
+    successes and failures. A lightweight flag suppresses duplicate debug logs
+    when :mod:`numpy` is unavailable so callers can repeatedly probe without
+    spamming the logger.
+    """
+
+    global _NP_MISSING_LOGGED
+
+    np = cached_import("numpy")
+    if np is None:
+        if not _NP_MISSING_LOGGED:
+            logger.debug("Failed to import numpy; continuing in non-vectorised mode")
+            _NP_MISSING_LOGGED = True
+        return None
+
+    if _NP_MISSING_LOGGED:
+        _NP_MISSING_LOGGED = False
+    return np
+
+
+def get_nodonx() -> type | None:
+    """Return :class:`tnfr.node.NodoNX` using import caching."""
+
+    return cached_import("tnfr.node", "NodoNX")
+
+
+def prune_failed_imports() -> None:
+    """Clear the registry of recorded import failures and warnings."""
+
+    _IMPORT_STATE.clear()
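A minimal usage sketch of the import-caching helpers added above, based only on the signatures visible in this diff. The `yaml` module below is merely a stand-in for any optional dependency; it is not a tnfr requirement.

from tnfr.import_utils import IMPORT_LOG, cached_import, get_numpy, prune_failed_imports

# Optional dependency with a fallback value; a failed import is reported once
# (here via the logger), recorded in IMPORT_LOG and then served from the cache.
yaml = cached_import("yaml", fallback=None, emit="log")

# numpy access goes through the same cache; None means "run non-vectorised".
np = get_numpy()
if np is not None:
    print(np.arange(3))

# After installing a previously missing package at runtime, reset both caches.
prune_failed_imports()
cached_import.cache_clear()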
tnfr/initialization.py ADDED
@@ -0,0 +1,197 @@
+"""Node initialization."""
+
+from __future__ import annotations
+import random
+from typing import TYPE_CHECKING
+
+from dataclasses import dataclass
+
+from .constants import VF_KEY, THETA_KEY, get_graph_param
+from .helpers.numeric import clamp
+from .rng import make_rng
+
+if TYPE_CHECKING:  # pragma: no cover
+    import networkx as nx  # type: ignore[import-untyped]
+
+__all__ = ("InitParams", "init_node_attrs")
+
+
+@dataclass
+class InitParams:
+    """Nodal initialization parameters."""
+
+    seed: int | None
+    init_rand_phase: bool
+    th_min: float
+    th_max: float
+    vf_mode: str
+    vf_min_lim: float
+    vf_max_lim: float
+    vf_uniform_min: float | None
+    vf_uniform_max: float | None
+    vf_mean: float
+    vf_std: float
+    clamp_to_limits: bool
+    si_min: float
+    si_max: float
+    epi_val: float
+
+    @classmethod
+    def from_graph(cls, G: "nx.Graph") -> "InitParams":
+        """Build ``InitParams`` from ``G.graph``."""
+
+        return cls(
+            seed=get_graph_param(G, "RANDOM_SEED", int),
+            init_rand_phase=get_graph_param(G, "INIT_RANDOM_PHASE", bool),
+            th_min=get_graph_param(G, "INIT_THETA_MIN"),
+            th_max=get_graph_param(G, "INIT_THETA_MAX"),
+            vf_mode=str(get_graph_param(G, "INIT_VF_MODE", str)).lower(),
+            vf_min_lim=get_graph_param(G, "VF_MIN"),
+            vf_max_lim=get_graph_param(G, "VF_MAX"),
+            vf_uniform_min=get_graph_param(G, "INIT_VF_MIN"),
+            vf_uniform_max=get_graph_param(G, "INIT_VF_MAX"),
+            vf_mean=get_graph_param(G, "INIT_VF_MEAN"),
+            vf_std=get_graph_param(G, "INIT_VF_STD"),
+            clamp_to_limits=get_graph_param(
+                G, "INIT_VF_CLAMP_TO_LIMITS", bool
+            ),
+            si_min=get_graph_param(G, "INIT_SI_MIN"),
+            si_max=get_graph_param(G, "INIT_SI_MAX"),
+            epi_val=get_graph_param(G, "INIT_EPI_VALUE"),
+        )
+
+
+def _init_phase(
+    nd: dict,
+    rng: random.Random,
+    *,
+    override: bool,
+    random_phase: bool,
+    th_min: float,
+    th_max: float,
+) -> None:
+    """Initialise ``θ`` in ``nd``."""
+    if random_phase:
+        if override or THETA_KEY not in nd:
+            nd[THETA_KEY] = rng.uniform(th_min, th_max)
+    else:
+        if override:
+            nd[THETA_KEY] = 0.0
+        else:
+            nd.setdefault(THETA_KEY, 0.0)
+
+
+def _init_vf(
+    nd: dict,
+    rng: random.Random,
+    *,
+    override: bool,
+    mode: str,
+    vf_uniform_min: float,
+    vf_uniform_max: float,
+    vf_mean: float,
+    vf_std: float,
+    vf_min_lim: float,
+    vf_max_lim: float,
+    clamp_to_limits: bool,
+) -> None:
+    """Initialise ``νf`` in ``nd``."""
+    if mode == "uniform":
+        vf = rng.uniform(vf_uniform_min, vf_uniform_max)
+    elif mode == "normal":
+        for _ in range(16):
+            cand = rng.normalvariate(vf_mean, vf_std)
+            if vf_min_lim <= cand <= vf_max_lim:
+                vf = cand
+                break
+        else:
+            vf = min(
+                max(rng.normalvariate(vf_mean, vf_std), vf_min_lim),
+                vf_max_lim,
+            )
+    else:
+        vf = float(nd.get(VF_KEY, 0.5))
+    if clamp_to_limits:
+        vf = clamp(vf, vf_min_lim, vf_max_lim)
+    if override or VF_KEY not in nd:
+        nd[VF_KEY] = vf
+
+
+def _init_si_epi(
+    nd: dict,
+    rng: random.Random,
+    *,
+    override: bool,
+    si_min: float,
+    si_max: float,
+    epi_val: float,
+) -> None:
+    """Initialise ``Si`` and ``EPI`` in ``nd``."""
+    if override or "EPI" not in nd:
+        nd["EPI"] = epi_val
+
+    si = rng.uniform(si_min, si_max)
+    if override or "Si" not in nd:
+        nd["Si"] = si
+
+
+def init_node_attrs(G: "nx.Graph", *, override: bool = True) -> "nx.Graph":
+    """Initialise EPI, θ, νf and Si on the nodes of ``G``.
+
+    Parameters can be customised via ``G.graph`` entries:
+    ``RANDOM_SEED``, ``INIT_RANDOM_PHASE``, ``INIT_THETA_MIN/MAX``,
+    ``INIT_VF_MODE``, ``VF_MIN``, ``VF_MAX``, ``INIT_VF_MIN/MAX``,
+    ``INIT_VF_MEAN``, ``INIT_VF_STD`` and ``INIT_VF_CLAMP_TO_LIMITS``.
+    Ranges for ``Si`` are added via ``INIT_SI_MIN`` and ``INIT_SI_MAX``, and
+    for ``EPI`` via ``INIT_EPI_VALUE``. If ``INIT_VF_MIN`` is greater than
+    ``INIT_VF_MAX``, values are swapped and clamped to ``VF_MIN``/``VF_MAX``.
+    """
+    params = InitParams.from_graph(G)
+
+    vf_uniform_min = params.vf_uniform_min
+    vf_uniform_max = params.vf_uniform_max
+    vf_min_lim = params.vf_min_lim
+    vf_max_lim = params.vf_max_lim
+    if vf_uniform_min is None:
+        vf_uniform_min = vf_min_lim
+    if vf_uniform_max is None:
+        vf_uniform_max = vf_max_lim
+    if vf_uniform_min > vf_uniform_max:
+        vf_uniform_min, vf_uniform_max = vf_uniform_max, vf_uniform_min
+    params.vf_uniform_min = max(vf_uniform_min, vf_min_lim)
+    params.vf_uniform_max = min(vf_uniform_max, vf_max_lim)
+
+    rng = make_rng(params.seed, -1, G)
+    for _, nd in G.nodes(data=True):
+
+        _init_phase(
+            nd,
+            rng,
+            override=override,
+            random_phase=params.init_rand_phase,
+            th_min=params.th_min,
+            th_max=params.th_max,
+        )
+        _init_vf(
+            nd,
+            rng,
+            override=override,
+            mode=params.vf_mode,
+            vf_uniform_min=params.vf_uniform_min,
+            vf_uniform_max=params.vf_uniform_max,
+            vf_mean=params.vf_mean,
+            vf_std=params.vf_std,
+            vf_min_lim=params.vf_min_lim,
+            vf_max_lim=params.vf_max_lim,
+            clamp_to_limits=params.clamp_to_limits,
+        )
+        _init_si_epi(
+            nd,
+            rng,
+            override=override,
+            si_min=params.si_min,
+            si_max=params.si_max,
+            epi_val=params.epi_val,
+        )
+
+    return G
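A minimal usage sketch of the new initialisation entry point, assuming the remaining INIT_* parameters resolve to package defaults through get_graph_param; the graph and the few keys set below are illustrative only.

import networkx as nx

from tnfr.initialization import init_node_attrs

# Any networkx graph works; parameters are read from G.graph, with documented
# keys such as RANDOM_SEED, INIT_RANDOM_PHASE and INIT_VF_MODE.
G = nx.erdos_renyi_graph(10, 0.3)
G.graph.update(
    {
        "RANDOM_SEED": 42,
        "INIT_RANDOM_PHASE": True,
        "INIT_VF_MODE": "uniform",
    }
)

# Seeds EPI, θ, νf and Si on every node, in place, and returns G.
init_node_attrs(G)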