tnfr 4.5.1__py3-none-any.whl → 4.5.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of tnfr might be problematic. Click here for more details.

Files changed (78)
  1. tnfr/__init__.py +91 -90
  2. tnfr/alias.py +546 -0
  3. tnfr/cache.py +578 -0
  4. tnfr/callback_utils.py +388 -0
  5. tnfr/cli/__init__.py +75 -0
  6. tnfr/cli/arguments.py +177 -0
  7. tnfr/cli/execution.py +288 -0
  8. tnfr/cli/utils.py +36 -0
  9. tnfr/collections_utils.py +300 -0
  10. tnfr/config.py +19 -28
  11. tnfr/constants/__init__.py +174 -0
  12. tnfr/constants/core.py +159 -0
  13. tnfr/constants/init.py +31 -0
  14. tnfr/constants/metric.py +110 -0
  15. tnfr/constants_glyphs.py +98 -0
  16. tnfr/dynamics/__init__.py +658 -0
  17. tnfr/dynamics/dnfr.py +733 -0
  18. tnfr/dynamics/integrators.py +267 -0
  19. tnfr/dynamics/sampling.py +31 -0
  20. tnfr/execution.py +201 -0
  21. tnfr/flatten.py +283 -0
  22. tnfr/gamma.py +302 -88
  23. tnfr/glyph_history.py +290 -0
  24. tnfr/grammar.py +285 -96
  25. tnfr/graph_utils.py +84 -0
  26. tnfr/helpers/__init__.py +71 -0
  27. tnfr/helpers/numeric.py +87 -0
  28. tnfr/immutable.py +178 -0
  29. tnfr/import_utils.py +228 -0
  30. tnfr/initialization.py +197 -0
  31. tnfr/io.py +246 -0
  32. tnfr/json_utils.py +162 -0
  33. tnfr/locking.py +37 -0
  34. tnfr/logging_utils.py +116 -0
  35. tnfr/metrics/__init__.py +41 -0
  36. tnfr/metrics/coherence.py +829 -0
  37. tnfr/metrics/common.py +151 -0
  38. tnfr/metrics/core.py +101 -0
  39. tnfr/metrics/diagnosis.py +234 -0
  40. tnfr/metrics/export.py +137 -0
  41. tnfr/metrics/glyph_timing.py +189 -0
  42. tnfr/metrics/reporting.py +148 -0
  43. tnfr/metrics/sense_index.py +120 -0
  44. tnfr/metrics/trig.py +181 -0
  45. tnfr/metrics/trig_cache.py +109 -0
  46. tnfr/node.py +214 -159
  47. tnfr/observers.py +126 -136
  48. tnfr/ontosim.py +134 -134
  49. tnfr/operators/__init__.py +420 -0
  50. tnfr/operators/jitter.py +203 -0
  51. tnfr/operators/remesh.py +485 -0
  52. tnfr/presets.py +46 -14
  53. tnfr/rng.py +254 -0
  54. tnfr/selector.py +210 -0
  55. tnfr/sense.py +284 -131
  56. tnfr/structural.py +207 -79
  57. tnfr/tokens.py +60 -0
  58. tnfr/trace.py +329 -94
  59. tnfr/types.py +43 -17
  60. tnfr/validators.py +70 -24
  61. tnfr/value_utils.py +59 -0
  62. tnfr-4.5.2.dist-info/METADATA +379 -0
  63. tnfr-4.5.2.dist-info/RECORD +67 -0
  64. tnfr/cli.py +0 -322
  65. tnfr/constants.py +0 -277
  66. tnfr/dynamics.py +0 -814
  67. tnfr/helpers.py +0 -264
  68. tnfr/main.py +0 -47
  69. tnfr/metrics.py +0 -597
  70. tnfr/operators.py +0 -525
  71. tnfr/program.py +0 -176
  72. tnfr/scenarios.py +0 -34
  73. tnfr-4.5.1.dist-info/METADATA +0 -221
  74. tnfr-4.5.1.dist-info/RECORD +0 -28
  75. {tnfr-4.5.1.dist-info → tnfr-4.5.2.dist-info}/WHEEL +0 -0
  76. {tnfr-4.5.1.dist-info → tnfr-4.5.2.dist-info}/entry_points.txt +0 -0
  77. {tnfr-4.5.1.dist-info → tnfr-4.5.2.dist-info}/licenses/LICENSE.md +0 -0
  78. {tnfr-4.5.1.dist-info → tnfr-4.5.2.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,420 @@
1
+ """Network operators."""
2
+
3
+ from __future__ import annotations
4
+ from typing import Any, TYPE_CHECKING, Callable
5
+ import math
6
+ import heapq
7
+ from itertools import islice
8
+ from statistics import fmean, StatisticsError
9
+
10
+ from ..alias import get_attr
11
+ from ..constants import DEFAULTS, get_aliases, get_param
12
+
13
+ from ..helpers.numeric import angle_diff
14
+ from ..metrics.trig import neighbor_phase_mean
15
+ from ..import_utils import get_nodonx
16
+ from ..rng import make_rng
17
+ from tnfr import glyph_history
18
+ from ..types import Glyph
19
+
20
+ from .jitter import (
21
+ JitterCache,
22
+ JitterCacheManager,
23
+ get_jitter_manager,
24
+ reset_jitter_manager,
25
+ random_jitter,
26
+ )
27
+ from .remesh import (
28
+ apply_network_remesh,
29
+ apply_topological_remesh,
30
+ apply_remesh_if_globally_stable,
31
+ )
32
+
33
+ if TYPE_CHECKING: # pragma: no cover - type checking only
34
+ from ..node import NodoProtocol
35
+
36
+ ALIAS_EPI = get_aliases("EPI")
37
+
38
+ __all__ = [
39
+ "JitterCache",
40
+ "JitterCacheManager",
41
+ "get_jitter_manager",
42
+ "reset_jitter_manager",
43
+ "random_jitter",
44
+ "get_neighbor_epi",
45
+ "get_glyph_factors",
46
+ "GLYPH_OPERATIONS",
47
+ "apply_glyph_obj",
48
+ "apply_glyph",
49
+ "apply_network_remesh",
50
+ "apply_topological_remesh",
51
+ "apply_remesh_if_globally_stable",
52
+ ]
53
+
54
+
55
def get_glyph_factors(node: NodoProtocol) -> dict[str, Any]:
    """Return the glyph-factor mapping configured on ``node``'s graph.

    Falls back to a fresh copy of ``DEFAULTS["GLYPH_FACTORS"]`` when the
    graph carries no ``"GLYPH_FACTORS"`` entry.  The copy is built lazily
    (only on the miss path) instead of on every call.
    """
    try:
        return node.graph["GLYPH_FACTORS"]
    except KeyError:
        # Copy so callers cannot mutate the shared defaults.
        return DEFAULTS["GLYPH_FACTORS"].copy()
58
+
59
+
60
def get_factor(gf: dict[str, Any], key: str, default: float) -> float:
    """Look up ``key`` in ``gf`` and coerce the value to ``float``.

    Missing keys fall back to ``default`` (also coerced).
    """
    value = gf.get(key, default)
    return float(value)
63
+
64
+
65
+ # -------------------------
66
+ # Glyphs (operadores locales)
67
+ # -------------------------
68
+
69
+
70
def get_neighbor_epi(node: NodoProtocol) -> tuple[list[NodoProtocol], float]:
    """Return neighbour list and their mean ``EPI`` without mutating ``node``."""

    epi = node.EPI
    neigh = list(node.neighbors())
    if not neigh:
        # No neighbours: report the node's own EPI as the "mean".
        return [], epi

    if hasattr(node, "G"):
        # Graph-backed node: neighbours may be raw graph ids rather than
        # node objects, so EPI may have to be read from node attributes.
        G = node.G
        total = 0.0
        count = 0
        has_valid_neighbor = False
        needs_conversion = False
        for v in neigh:
            if hasattr(v, "EPI"):
                total += float(v.EPI)
                has_valid_neighbor = True
            else:
                attr = get_attr(G.nodes[v], ALIAS_EPI, None)
                if attr is not None:
                    total += float(attr)
                    has_valid_neighbor = True
                else:
                    # No EPI anywhere: use the node's own EPI as a stand-in
                    # and remember to wrap the raw id below.
                    total += float(epi)
                    needs_conversion = True
            count += 1
        if not has_valid_neighbor:
            # Every neighbour lacked an EPI value; behave as if isolated.
            return [], epi
        epi_bar = total / count if count else float(epi)
        if needs_conversion:
            # Wrap raw graph ids so callers always receive node objects.
            NodoNX = get_nodonx()
            if NodoNX is None:
                raise ImportError("NodoNX is unavailable")
            neigh = [
                v if hasattr(v, "EPI") else NodoNX.from_graph(node.G, v)
                for v in neigh
            ]
    else:
        # Pure-object node: every neighbour is expected to expose ``EPI``.
        try:
            epi_bar = fmean(v.EPI for v in neigh)
        except StatisticsError:
            # Empty iterable (defensive): fall back to the node's own EPI.
            epi_bar = epi

    return neigh, epi_bar
115
+
116
+
117
+ def _determine_dominant(
118
+ neigh: list[NodoProtocol], default_kind: str
119
+ ) -> tuple[str, float]:
120
+ """Return dominant ``epi_kind`` among ``neigh`` and its absolute ``EPI``."""
121
+ best_kind: str | None = None
122
+ best_abs = 0.0
123
+ for v in neigh:
124
+ abs_v = abs(v.EPI)
125
+ if abs_v > best_abs:
126
+ best_abs = abs_v
127
+ best_kind = v.epi_kind
128
+ if not best_kind:
129
+ return default_kind, 0.0
130
+ return best_kind, best_abs
131
+
132
+
133
def _mix_epi_with_neighbors(
    node: NodoProtocol, mix: float, default_glyph: Glyph | str
) -> tuple[float, str]:
    """Blend ``node.EPI`` toward the mean of its neighbours.

    ``mix`` is the blending weight (0 keeps the node's value, 1 adopts the
    neighbour mean).  Returns the neighbour mean and the ``epi_kind``
    finally assigned to ``node``.
    """
    if isinstance(default_glyph, Glyph):
        default_kind = default_glyph.value
    else:
        default_kind = str(default_glyph)

    current = node.EPI
    neigh, epi_bar = get_neighbor_epi(node)

    if not neigh:
        # Isolated node: nothing to blend, just tag the kind.
        node.epi_kind = default_kind
        return current, default_kind

    dominant, dominant_abs = _determine_dominant(neigh, default_kind)
    blended = (1 - mix) * current + mix * epi_bar
    node.EPI = blended
    # The dominant neighbour kind wins only if it outweighs the new EPI.
    final = dominant if dominant_abs > abs(blended) else node.epi_kind
    if not final:
        final = default_kind
    node.epi_kind = final
    return epi_bar, final
157
+
158
+
159
def _op_AL(node: NodoProtocol, gf: dict[str, Any]) -> None:  # AL — Emission
    """Emission glyph: raise ``EPI`` by the configured ``AL_boost``."""
    boost = get_factor(gf, "AL_boost", 0.05)
    node.EPI = node.EPI + boost
162
+
163
+
164
def _op_EN(node: NodoProtocol, gf: dict[str, Any]) -> None:  # EN — Reception
    """Reception glyph: pull ``EPI`` toward the neighbour mean."""
    weight = get_factor(gf, "EN_mix", 0.25)
    _mix_epi_with_neighbors(node, weight, Glyph.EN)
167
+
168
+
169
def _op_IL(node: NodoProtocol, gf: dict[str, Any]) -> None:  # IL — Coherence
    """Coherence glyph: damp ``dnfr`` by ``IL_dnfr_factor``."""
    damping = get_factor(gf, "IL_dnfr_factor", 0.7)
    node.dnfr = damping * getattr(node, "dnfr", 0.0)
172
+
173
+
174
def _op_OZ(node: NodoProtocol, gf: dict[str, Any]) -> None:  # OZ — Dissonance
    """Dissonance glyph: amplify ``dnfr`` or inject additive noise.

    With ``OZ_NOISE_MODE`` enabled, additive jitter of width ``OZ_SIGMA``
    is applied (no-op when sigma is not positive).  Otherwise ``dnfr`` is
    scaled by ``OZ_dnfr_factor``, seeding it to 0.1 when currently ~0.
    """
    gain = get_factor(gf, "OZ_dnfr_factor", 1.3)
    dnfr = getattr(node, "dnfr", 0.0)
    if not bool(node.graph.get("OZ_NOISE_MODE", False)):
        node.dnfr = gain * dnfr if abs(dnfr) > 1e-9 else 0.1
        return
    sigma = float(node.graph.get("OZ_SIGMA", 0.1))
    if sigma <= 0:
        node.dnfr = dnfr
        return
    node.dnfr = dnfr + random_jitter(node, sigma)
185
+
186
+
187
+ def _um_candidate_iter(node: NodoProtocol):
188
+ sample_ids = node.graph.get("_node_sample")
189
+ if sample_ids is not None and hasattr(node, "G"):
190
+ NodoNX = get_nodonx()
191
+ if NodoNX is None:
192
+ raise ImportError("NodoNX is unavailable")
193
+ base = (NodoNX.from_graph(node.G, j) for j in sample_ids)
194
+ else:
195
+ base = node.all_nodes()
196
+ for j in base:
197
+ same = (j is node) or (
198
+ getattr(node, "n", None) == getattr(j, "n", None)
199
+ )
200
+ if same or node.has_edge(j):
201
+ continue
202
+ yield j
203
+
204
+
205
def _um_select_candidates(
    node: NodoProtocol,
    candidates,
    limit: int,
    mode: str,
    th: float,
):
    """Select a subset of ``candidates`` for UM coupling.

    ``limit <= 0`` keeps every candidate.  ``mode == "proximity"`` keeps
    the ``limit`` candidates whose phase is closest to ``th``; any other
    mode performs reservoir sampling over the stream (the reservoir is
    additionally shuffled when ``mode == "sample"``).
    """
    if limit <= 0:
        return list(candidates)

    if mode == "proximity":
        return heapq.nsmallest(
            limit, candidates, key=lambda j: abs(angle_diff(j.theta, th))
        )

    # The RNG is only needed by the sampling path, so build it lazily
    # here instead of unconditionally at function entry.
    rng = make_rng(int(node.graph.get("RANDOM_SEED", 0)), node.offset(), node.G)

    # Reservoir sampling keeps a uniform ``limit``-sized subset without
    # materialising the whole candidate stream.
    reservoir = list(islice(candidates, limit))
    for i, cand in enumerate(candidates, start=limit):
        j = rng.randint(0, i)
        if j < limit:
            reservoir[j] = cand

    if mode == "sample":
        rng.shuffle(reservoir)

    return reservoir
233
+
234
+
235
def _op_UM(node: NodoProtocol, gf: dict[str, Any]) -> None:  # UM — Coupling
    """Coupling glyph: pull the node's phase toward its neighbourhood mean.

    When ``UM_FUNCTIONAL_LINKS`` is enabled, also creates edges to
    compatible non-neighbour nodes; compatibility blends phase proximity,
    ``EPI`` similarity and ``Si`` similarity against ``UM_COMPAT_THRESHOLD``.
    """
    k = get_factor(gf, "UM_theta_push", 0.25)
    th = node.theta
    thL = neighbor_phase_mean(node)
    d = angle_diff(thL, th)
    # Move a fraction ``k`` of the angular gap toward the local mean phase.
    node.theta = th + k * d

    if bool(node.graph.get("UM_FUNCTIONAL_LINKS", False)):
        thr = float(
            node.graph.get(
                "UM_COMPAT_THRESHOLD",
                DEFAULTS.get("UM_COMPAT_THRESHOLD", 0.75),
            )
        )
        epi_i = node.EPI
        si_i = node.Si

        limit = int(node.graph.get("UM_CANDIDATE_COUNT", 0))
        mode = str(node.graph.get("UM_CANDIDATE_MODE", "sample")).lower()
        candidates = _um_select_candidates(
            node, _um_candidate_iter(node), limit, mode, th
        )

        for j in candidates:
            th_j = j.theta
            # Normalised phase distance in [0, 1].
            dphi = abs(angle_diff(th_j, th)) / math.pi
            epi_j = j.EPI
            si_j = j.Si
            # Relative EPI similarity; epsilon avoids division by zero.
            epi_sim = 1.0 - abs(epi_i - epi_j) / (
                abs(epi_i) + abs(epi_j) + 1e-9
            )
            si_sim = 1.0 - abs(si_i - si_j)
            # Weighted compatibility: 50% phase, 25% EPI, 25% Si.
            compat = (1 - dphi) * 0.5 + 0.25 * epi_sim + 0.25 * si_sim
            if compat >= thr:
                node.add_edge(j, compat)
270
+
271
+
272
def _op_RA(node: NodoProtocol, gf: dict[str, Any]) -> None:  # RA — Resonance
    """Resonance glyph: diffuse ``EPI`` toward neighbours by ``RA_epi_diff``."""
    spread = get_factor(gf, "RA_epi_diff", 0.15)
    _mix_epi_with_neighbors(node, spread, Glyph.RA)
275
+
276
+
277
def _op_SHA(node: NodoProtocol, gf: dict[str, Any]) -> None:  # SHA — Silence
    """Silence glyph: attenuate the structural frequency ``vf``."""
    attenuation = get_factor(gf, "SHA_vf_factor", 0.85)
    node.vf = attenuation * node.vf
280
+
281
+
282
# Default scale factors for the expansion (VAL) and contraction (NUL)
# glyphs; overridable per-graph via ``VAL_scale`` / ``NUL_scale``.
factor_val = 1.15
factor_nul = 0.85
_SCALE_FACTORS = {Glyph.VAL: factor_val, Glyph.NUL: factor_nul}
285
+
286
+
287
+ def _op_scale(node: NodoProtocol, factor: float) -> None:
288
+ node.vf *= factor
289
+
290
+
291
def _make_scale_op(glyph: Glyph):
    """Build the VAL/NUL operator that scales ``vf`` by a configured factor."""

    # Both the config key and the fallback depend only on ``glyph``, so
    # resolve them once when the operator is built.
    key = "VAL_scale" if glyph is Glyph.VAL else "NUL_scale"
    fallback = _SCALE_FACTORS[glyph]

    def _op(node: NodoProtocol, gf: dict[str, Any]) -> None:
        _op_scale(node, get_factor(gf, key, fallback))

    return _op
299
+
300
+
301
def _op_THOL(
    node: NodoProtocol, gf: dict[str, Any]
) -> None:  # THOL — Self-organisation
    """Self-organisation glyph: feed EPI curvature back into ``dnfr``."""
    accel = get_factor(gf, "THOL_accel", 0.10)
    node.dnfr = node.dnfr + accel * getattr(node, "d2EPI", 0.0)
306
+
307
+
308
def _op_ZHIR(
    node: NodoProtocol, gf: dict[str, Any]
) -> None:  # ZHIR — Mutation
    """Mutation glyph: rotate the node phase by ``ZHIR_theta_shift``."""
    rotation = get_factor(gf, "ZHIR_theta_shift", math.pi / 2)
    node.theta = node.theta + rotation
313
+
314
+
315
def _op_NAV(
    node: NodoProtocol, gf: dict[str, Any]
) -> None:  # NAV — Transition
    """Transition glyph: steer ``dnfr`` toward ``vf`` and add jitter.

    In strict mode (``NAV_STRICT``) the base is ``vf`` itself; otherwise
    it is an ``eta``-weighted blend of the current ``dnfr`` and ``vf``
    signed to agree with ``dnfr``.  Jitter is random unless
    ``NAV_RANDOM`` is disabled, in which case it is deterministic.
    """
    dnfr = node.dnfr
    vf = node.vf
    eta = get_factor(gf, "NAV_eta", 0.5)
    if bool(node.graph.get("NAV_STRICT", False)):
        base = vf
    else:
        target = vf if dnfr >= 0 else -vf
        base = (1.0 - eta) * dnfr + eta * target
    amplitude = get_factor(gf, "NAV_jitter", 0.05)
    if bool(node.graph.get("NAV_RANDOM", True)):
        jitter = random_jitter(node, amplitude)
    else:
        jitter = amplitude if base >= 0 else -amplitude
    node.dnfr = base + jitter
334
+
335
+
336
def _op_REMESH(
    node: NodoProtocol, gf: dict[str, Any] | None = None
) -> None:  # REMESH — advisory only
    """Advisory glyph: warn (once per step) that REMESH is network-scale."""
    step_idx = glyph_history.current_step_idx(node)
    if node.graph.get("_remesh_warn_step", None) == step_idx:
        return  # already warned during this step
    msg = (
        "REMESH es a escala de red. Usa apply_remesh_if_globally_"
        "stable(G) o apply_network_remesh(G)."
    )
    hist = glyph_history.ensure_history(node)
    glyph_history.append_metric(
        hist,
        "events",
        ("warn", {"step": step_idx, "node": None, "msg": msg}),
    )
    node.graph["_remesh_warn_step"] = step_idx
354
+
355
+
356
+ # -------------------------
357
+ # Dispatcher
358
+ # -------------------------
359
+
360
# Maps each glyph to its local operator.  ``apply_glyph_obj`` resolves the
# callable here and invokes it as ``op(node, glyph_factors)``.
GLYPH_OPERATIONS: dict[Glyph, Callable[["NodoProtocol", dict[str, Any]], None]] = {
    Glyph.AL: _op_AL,
    Glyph.EN: _op_EN,
    Glyph.IL: _op_IL,
    Glyph.OZ: _op_OZ,
    Glyph.UM: _op_UM,
    Glyph.RA: _op_RA,
    Glyph.SHA: _op_SHA,
    Glyph.VAL: _make_scale_op(Glyph.VAL),
    Glyph.NUL: _make_scale_op(Glyph.NUL),
    Glyph.THOL: _op_THOL,
    Glyph.ZHIR: _op_ZHIR,
    Glyph.NAV: _op_NAV,
    Glyph.REMESH: _op_REMESH,
}
375
+
376
+
377
def apply_glyph_obj(
    node: NodoProtocol, glyph: Glyph | str, *, window: int | None = None
) -> None:
    """Apply ``glyph`` to an object satisfying :class:`NodoProtocol`.

    Unknown glyph names are recorded as a ``warn`` event in the node's
    history before ``ValueError`` is raised.  ``window`` bounds the glyph
    hysteresis history; when ``None`` it is read from the
    ``GLYPH_HYSTERESIS_WINDOW`` parameter.
    """

    try:
        g = glyph if isinstance(glyph, Glyph) else Glyph(str(glyph))
    except ValueError:
        # Record the unknown glyph in the history before re-raising.
        step_idx = glyph_history.current_step_idx(node)
        hist = glyph_history.ensure_history(node)
        glyph_history.append_metric(
            hist,
            "events",
            (
                "warn",
                {
                    "step": step_idx,
                    "node": getattr(node, "n", None),
                    "msg": f"glyph desconocido: {glyph}",
                },
            ),
        )
        raise ValueError(f"glyph desconocido: {glyph}")

    op = GLYPH_OPERATIONS.get(g)
    if op is None:
        raise ValueError(f"glyph sin operador: {g}")
    if window is None:
        window = int(get_param(node, "GLYPH_HYSTERESIS_WINDOW"))
    gf = get_glyph_factors(node)
    op(node, gf)
    # Record the applied glyph and tag the node's EPI kind with it.
    glyph_history.push_glyph(node._glyph_storage(), g.value, window)
    node.epi_kind = g.value
410
+
411
+
412
def apply_glyph(
    G, n, glyph: Glyph | str, *, window: int | None = None
) -> None:
    """Adapter to operate on ``networkx`` graphs."""
    NodoNX = get_nodonx()
    if NodoNX is None:
        raise ImportError("NodoNX is unavailable")
    wrapper = NodoNX(G, n)
    apply_glyph_obj(wrapper, glyph, window=window)
@@ -0,0 +1,203 @@
1
+ from __future__ import annotations
2
+ from typing import Any, TYPE_CHECKING
3
+
4
+ from cachetools import LRUCache
5
+
6
+ from ..cache import ensure_node_offset_map
7
+ from ..rng import (
8
+ ScopedCounterCache,
9
+ make_rng,
10
+ base_seed,
11
+ cache_enabled,
12
+ clear_rng_cache as _clear_rng_cache,
13
+ seed_hash,
14
+ )
15
+ from ..import_utils import get_nodonx
16
+
17
+ if TYPE_CHECKING: # pragma: no cover - type checking only
18
+ from ..node import NodoProtocol
19
+
20
+ # Guarded by the cache lock to ensure thread-safe access. ``seq`` stores
21
+ # per-scope jitter sequence counters in an LRU cache bounded to avoid
22
+ # unbounded memory usage.
23
+ _JITTER_MAX_ENTRIES = 1024
24
+
25
+
26
class JitterCache:
    """Container for jitter-related caches."""

    def __init__(self, max_entries: int = _JITTER_MAX_ENTRIES) -> None:
        # Per-scope jitter sequence counters, LRU-bounded to cap memory.
        self._sequence = ScopedCounterCache("jitter", max_entries)
        self.settings: dict[str, Any] = {"max_entries": self._sequence.max_entries}

    @property
    def seq(self) -> LRUCache[tuple[int, int], int]:
        """Underlying sequence cache (exposed for tests and diagnostics)."""
        return self._sequence.cache

    @property
    def lock(self):
        """Lock guarding the sequence cache."""
        return self._sequence.lock

    @property
    def max_entries(self) -> int:
        """Maximum number of cached jitter sequences."""
        return self._sequence.max_entries

    @max_entries.setter
    def max_entries(self, value: int) -> None:
        """Resize the sequence cache to ``value`` entries."""
        self._sequence.configure(max_entries=int(value))
        self.settings["max_entries"] = self._sequence.max_entries

    def setup(self, force: bool = False, max_entries: int | None = None) -> None:
        """Ensure the jitter cache matches the configured size."""
        self._sequence.configure(force=force, max_entries=max_entries)
        self.settings["max_entries"] = self._sequence.max_entries

    def clear(self) -> None:
        """Drop cached RNGs and all jitter state."""
        _clear_rng_cache()
        self._sequence.clear()

    def bump(self, key: tuple[int, int]) -> int:
        """Return the current counter for ``key``, then increment it."""
        return self._sequence.bump(key)
76
+
77
+
78
class JitterCacheManager:
    """Manager exposing the jitter cache without global reassignment."""

    def __init__(self, cache: JitterCache | None = None) -> None:
        self.cache = cache if cache is not None else JitterCache()

    # Convenience passthroughs delegating to the wrapped ``JitterCache``.
    @property
    def seq(self) -> LRUCache[tuple[int, int], int]:
        """Sequence cache of the wrapped ``JitterCache``."""
        return self.cache.seq

    @property
    def settings(self) -> dict[str, Any]:
        """Settings dict of the wrapped cache."""
        return self.cache.settings

    @property
    def lock(self):
        """Lock guarding the wrapped sequence cache."""
        return self.cache.lock

    @property
    def max_entries(self) -> int:
        """Maximum number of cached jitter entries."""
        return self.cache.max_entries

    @max_entries.setter
    def max_entries(self, value: int) -> None:
        """Resize the wrapped cache to ``value`` entries."""
        self.cache.max_entries = value

    def setup(self, force: bool = False, max_entries: int | None = None) -> None:
        """Ensure the jitter cache matches the configured size.

        An explicit ``max_entries`` forces a resize; when omitted the
        existing ``cache.max_entries`` is preserved.
        """
        if max_entries is None:
            self.cache.setup(force=force)
        else:
            self.cache.setup(force=True, max_entries=max_entries)

    def clear(self) -> None:
        """Clear cached RNGs and jitter state."""
        self.cache.clear()

    def bump(self, key: tuple[int, int]) -> int:
        """Return and increment the jitter sequence counter for ``key``."""
        return self.cache.bump(key)
128
+
129
+
130
+ # Lazy manager instance
131
+ _JITTER_MANAGER: JitterCacheManager | None = None
132
+
133
+
134
def get_jitter_manager() -> JitterCacheManager:
    """Return the singleton jitter manager, initializing on first use.

    The global is published only after ``setup`` succeeds, so a failing
    initialization cannot leave a half-configured singleton installed.
    """
    global _JITTER_MANAGER
    if _JITTER_MANAGER is None:
        manager = JitterCacheManager()
        manager.setup(force=True)
        _JITTER_MANAGER = manager
    return _JITTER_MANAGER
141
+
142
+
143
def reset_jitter_manager() -> None:
    """Reset the global jitter manager (useful for tests)."""
    global _JITTER_MANAGER
    manager = _JITTER_MANAGER
    if manager is not None:
        manager.clear()
        _JITTER_MANAGER = None
149
+
150
+
151
def _node_offset(G, n) -> int:
    """Deterministic per-node index used to derive jitter seeds."""
    offsets = ensure_node_offset_map(G)
    # Unknown nodes default to offset 0.
    return int(offsets.get(n, 0))
155
+
156
+
157
def _resolve_jitter_seed(node: NodoProtocol) -> tuple[int, int]:
    """Return ``(seed_key, scope_id)`` identifying ``node``'s jitter stream.

    Graph-backed nodes key on their offset within the graph; any other
    node gets a sticky per-object ``_noise_uid``.
    """
    NodoNX = get_nodonx()
    if NodoNX is None:
        raise ImportError("NodoNX is unavailable")
    if isinstance(node, NodoNX):
        return _node_offset(node.G, node.n), id(node.G)
    uid = getattr(node, "_noise_uid", None)
    if uid is None:
        # First request: pin a stable identifier on the node itself.
        uid = id(node)
        node._noise_uid = uid
    return int(uid), id(node)
168
+
169
+
170
def random_jitter(
    node: NodoProtocol,
    amplitude: float,
) -> float:
    """Return deterministic noise in ``[-amplitude, amplitude]`` for ``node``.

    The per-node jitter sequences are tracked using the global manager
    returned by :func:`get_jitter_manager`, so repeated calls advance the
    stream deterministically.

    Raises
    ------
    ValueError
        If ``amplitude`` is negative (zero is valid and yields ``0.0``).
    """
    if amplitude < 0:
        # Message fixed: zero is accepted, only negatives are rejected.
        raise ValueError("amplitude must be non-negative")
    if amplitude == 0:
        return 0.0

    seed_root = base_seed(node.G)
    seed_key, scope_id = _resolve_jitter_seed(node)

    cache_key = (seed_root, scope_id)
    seq = 0
    if cache_enabled(node.G):
        # Advance the per-scope sequence so successive calls differ.
        manager = get_jitter_manager()
        seq = manager.bump(cache_key)
    seed = seed_hash(seed_root, scope_id)
    rng = make_rng(seed, seed_key + seq, node.G)
    return rng.uniform(-amplitude, amplitude)
195
+
196
+
197
+ __all__ = [
198
+ "JitterCache",
199
+ "JitterCacheManager",
200
+ "get_jitter_manager",
201
+ "reset_jitter_manager",
202
+ "random_jitter",
203
+ ]