tnfr 4.5.2__py3-none-any.whl → 6.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (161)
  1. tnfr/__init__.py +228 -49
  2. tnfr/__init__.pyi +40 -0
  3. tnfr/_compat.py +11 -0
  4. tnfr/_version.py +7 -0
  5. tnfr/_version.pyi +7 -0
  6. tnfr/alias.py +106 -21
  7. tnfr/alias.pyi +140 -0
  8. tnfr/cache.py +666 -512
  9. tnfr/cache.pyi +232 -0
  10. tnfr/callback_utils.py +2 -9
  11. tnfr/callback_utils.pyi +105 -0
  12. tnfr/cli/__init__.py +21 -7
  13. tnfr/cli/__init__.pyi +47 -0
  14. tnfr/cli/arguments.py +42 -20
  15. tnfr/cli/arguments.pyi +33 -0
  16. tnfr/cli/execution.py +54 -20
  17. tnfr/cli/execution.pyi +80 -0
  18. tnfr/cli/utils.py +0 -2
  19. tnfr/cli/utils.pyi +8 -0
  20. tnfr/config/__init__.py +12 -0
  21. tnfr/config/__init__.pyi +8 -0
  22. tnfr/config/constants.py +104 -0
  23. tnfr/config/constants.pyi +12 -0
  24. tnfr/{config.py → config/init.py} +11 -7
  25. tnfr/config/init.pyi +8 -0
  26. tnfr/config/operator_names.py +106 -0
  27. tnfr/config/operator_names.pyi +28 -0
  28. tnfr/config/presets.py +104 -0
  29. tnfr/config/presets.pyi +7 -0
  30. tnfr/constants/__init__.py +78 -24
  31. tnfr/constants/__init__.pyi +104 -0
  32. tnfr/constants/core.py +1 -2
  33. tnfr/constants/core.pyi +17 -0
  34. tnfr/constants/init.pyi +12 -0
  35. tnfr/constants/metric.py +4 -12
  36. tnfr/constants/metric.pyi +19 -0
  37. tnfr/constants_glyphs.py +9 -91
  38. tnfr/constants_glyphs.pyi +12 -0
  39. tnfr/dynamics/__init__.py +112 -634
  40. tnfr/dynamics/__init__.pyi +83 -0
  41. tnfr/dynamics/adaptation.py +201 -0
  42. tnfr/dynamics/aliases.py +22 -0
  43. tnfr/dynamics/coordination.py +343 -0
  44. tnfr/dynamics/dnfr.py +1936 -354
  45. tnfr/dynamics/dnfr.pyi +33 -0
  46. tnfr/dynamics/integrators.py +369 -75
  47. tnfr/dynamics/integrators.pyi +35 -0
  48. tnfr/dynamics/runtime.py +521 -0
  49. tnfr/dynamics/sampling.py +8 -5
  50. tnfr/dynamics/sampling.pyi +7 -0
  51. tnfr/dynamics/selectors.py +680 -0
  52. tnfr/execution.py +56 -41
  53. tnfr/execution.pyi +65 -0
  54. tnfr/flatten.py +7 -7
  55. tnfr/flatten.pyi +28 -0
  56. tnfr/gamma.py +54 -37
  57. tnfr/gamma.pyi +40 -0
  58. tnfr/glyph_history.py +85 -38
  59. tnfr/glyph_history.pyi +53 -0
  60. tnfr/grammar.py +19 -338
  61. tnfr/grammar.pyi +13 -0
  62. tnfr/helpers/__init__.py +110 -30
  63. tnfr/helpers/__init__.pyi +66 -0
  64. tnfr/helpers/numeric.py +1 -0
  65. tnfr/helpers/numeric.pyi +12 -0
  66. tnfr/immutable.py +55 -19
  67. tnfr/immutable.pyi +37 -0
  68. tnfr/initialization.py +12 -10
  69. tnfr/initialization.pyi +73 -0
  70. tnfr/io.py +99 -34
  71. tnfr/io.pyi +11 -0
  72. tnfr/locking.pyi +7 -0
  73. tnfr/metrics/__init__.pyi +20 -0
  74. tnfr/metrics/coherence.py +934 -294
  75. tnfr/metrics/common.py +1 -3
  76. tnfr/metrics/common.pyi +15 -0
  77. tnfr/metrics/core.py +192 -34
  78. tnfr/metrics/core.pyi +13 -0
  79. tnfr/metrics/diagnosis.py +707 -101
  80. tnfr/metrics/diagnosis.pyi +89 -0
  81. tnfr/metrics/export.py +27 -13
  82. tnfr/metrics/glyph_timing.py +218 -38
  83. tnfr/metrics/reporting.py +22 -18
  84. tnfr/metrics/reporting.pyi +12 -0
  85. tnfr/metrics/sense_index.py +199 -25
  86. tnfr/metrics/sense_index.pyi +9 -0
  87. tnfr/metrics/trig.py +53 -18
  88. tnfr/metrics/trig.pyi +12 -0
  89. tnfr/metrics/trig_cache.py +3 -7
  90. tnfr/metrics/trig_cache.pyi +10 -0
  91. tnfr/node.py +148 -125
  92. tnfr/node.pyi +161 -0
  93. tnfr/observers.py +44 -30
  94. tnfr/observers.pyi +46 -0
  95. tnfr/ontosim.py +14 -13
  96. tnfr/ontosim.pyi +33 -0
  97. tnfr/operators/__init__.py +84 -52
  98. tnfr/operators/__init__.pyi +31 -0
  99. tnfr/operators/definitions.py +181 -0
  100. tnfr/operators/definitions.pyi +92 -0
  101. tnfr/operators/jitter.py +86 -23
  102. tnfr/operators/jitter.pyi +11 -0
  103. tnfr/operators/registry.py +80 -0
  104. tnfr/operators/registry.pyi +15 -0
  105. tnfr/operators/remesh.py +141 -57
  106. tnfr/presets.py +9 -54
  107. tnfr/presets.pyi +7 -0
  108. tnfr/py.typed +0 -0
  109. tnfr/rng.py +259 -73
  110. tnfr/rng.pyi +14 -0
  111. tnfr/selector.py +24 -17
  112. tnfr/selector.pyi +19 -0
  113. tnfr/sense.py +55 -43
  114. tnfr/sense.pyi +30 -0
  115. tnfr/structural.py +44 -267
  116. tnfr/structural.pyi +46 -0
  117. tnfr/telemetry/__init__.py +13 -0
  118. tnfr/telemetry/verbosity.py +37 -0
  119. tnfr/tokens.py +3 -2
  120. tnfr/tokens.pyi +41 -0
  121. tnfr/trace.py +272 -82
  122. tnfr/trace.pyi +68 -0
  123. tnfr/types.py +345 -6
  124. tnfr/types.pyi +145 -0
  125. tnfr/utils/__init__.py +158 -0
  126. tnfr/utils/__init__.pyi +133 -0
  127. tnfr/utils/cache.py +755 -0
  128. tnfr/utils/cache.pyi +156 -0
  129. tnfr/{collections_utils.py → utils/data.py} +57 -90
  130. tnfr/utils/data.pyi +73 -0
  131. tnfr/utils/graph.py +87 -0
  132. tnfr/utils/graph.pyi +10 -0
  133. tnfr/utils/init.py +746 -0
  134. tnfr/utils/init.pyi +85 -0
  135. tnfr/{json_utils.py → utils/io.py} +13 -18
  136. tnfr/utils/io.pyi +10 -0
  137. tnfr/utils/validators.py +130 -0
  138. tnfr/utils/validators.pyi +19 -0
  139. tnfr/validation/__init__.py +25 -0
  140. tnfr/validation/__init__.pyi +17 -0
  141. tnfr/validation/compatibility.py +59 -0
  142. tnfr/validation/compatibility.pyi +8 -0
  143. tnfr/validation/grammar.py +149 -0
  144. tnfr/validation/grammar.pyi +11 -0
  145. tnfr/validation/rules.py +194 -0
  146. tnfr/validation/rules.pyi +18 -0
  147. tnfr/validation/syntax.py +151 -0
  148. tnfr/validation/syntax.pyi +7 -0
  149. tnfr-6.0.0.dist-info/METADATA +135 -0
  150. tnfr-6.0.0.dist-info/RECORD +157 -0
  151. tnfr/graph_utils.py +0 -84
  152. tnfr/import_utils.py +0 -228
  153. tnfr/logging_utils.py +0 -116
  154. tnfr/validators.py +0 -84
  155. tnfr/value_utils.py +0 -59
  156. tnfr-4.5.2.dist-info/METADATA +0 -379
  157. tnfr-4.5.2.dist-info/RECORD +0 -67
  158. {tnfr-4.5.2.dist-info → tnfr-6.0.0.dist-info}/WHEEL +0 -0
  159. {tnfr-4.5.2.dist-info → tnfr-6.0.0.dist-info}/entry_points.txt +0 -0
  160. {tnfr-4.5.2.dist-info → tnfr-6.0.0.dist-info}/licenses/LICENSE.md +0 -0
  161. {tnfr-4.5.2.dist-info → tnfr-6.0.0.dist-info}/top_level.txt +0 -0
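The 6.0.0 release reorganizes several top-level helpers into the new `tnfr.utils` package (for example `collections_utils.py → utils/data.py` and `json_utils.py → utils/io.py` above). A minimal import shim along these lines should keep downstream code working across both layouts; only the `cached_node_list` move from `tnfr.cache` to `tnfr.utils` is confirmed by the `sampling.py` diff below, so treat the pattern, not the other module names, as the takeaway.

```python
# Compatibility shim sketch: prefer the 6.0.0 location, fall back to 4.5.x.
# Only cached_node_list's move (tnfr.cache -> tnfr.utils) is shown in this diff;
# applying the same pattern to other relocated helpers is an assumption.
try:
    from tnfr.utils import cached_node_list  # tnfr >= 6.0.0
except ImportError:
    from tnfr.cache import cached_node_list  # tnfr 4.5.x
```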
tnfr/dynamics/runtime.py ADDED
@@ -0,0 +1,521 @@
+ """Runtime orchestration for TNFR dynamics."""
+
+ from __future__ import annotations
+
+ import inspect
+ import math
+ import sys
+ from collections import deque
+ from collections.abc import Mapping, MutableMapping, MutableSequence
+ from typing import Any, cast
+
+ from ..alias import (
+     get_attr,
+     get_theta_attr,
+     set_attr,
+     set_theta,
+     set_theta_attr,
+     set_vf,
+     multi_recompute_abs_max,
+ )
+ from ..callback_utils import CallbackEvent, callback_manager
+ from ..constants import DEFAULTS, get_graph_param, get_param
+ from ..glyph_history import ensure_history
+ from ..helpers.numeric import clamp
+ from ..metrics.sense_index import compute_Si
+ from ..types import HistoryState, NodeId, TNFRGraph
+ from .aliases import ALIAS_DNFR, ALIAS_EPI, ALIAS_SI, ALIAS_VF
+ from . import adaptation, coordination, integrators, selectors
+ from .dnfr import default_compute_delta_nfr
+ from .sampling import update_node_sample as _update_node_sample
+ from ..operators import apply_remesh_if_globally_stable
+
+ HistoryLog = MutableSequence[MutableMapping[str, object]]
+
+ __all__ = (
+     "ALIAS_VF",
+     "ALIAS_DNFR",
+     "ALIAS_EPI",
+     "ALIAS_SI",
+     "apply_canonical_clamps",
+     "validate_canon",
+     "_normalize_job_overrides",
+     "_resolve_jobs_override",
+     "_prepare_dnfr",
+     "_update_nodes",
+     "_update_epi_hist",
+     "_maybe_remesh",
+     "_run_validators",
+     "_run_before_callbacks",
+     "_run_after_callbacks",
+     "step",
+     "run",
+ )
+
+
+ def _log_clamp(
+     hist: HistoryLog,
+     node: NodeId | None,
+     attr: str,
+     value: float,
+     lo: float,
+     hi: float,
+ ) -> None:
+     if value < lo or value > hi:
+         hist.append({"node": node, "attr": attr, "value": float(value)})
+
+
+ def _normalize_job_overrides(
+     job_overrides: Mapping[str, Any] | None,
+ ) -> dict[str, Any]:
+     if not job_overrides:
+         return {}
+
+     normalized: dict[str, Any] = {}
+     for key, value in job_overrides.items():
+         if key is None:
+             continue
+         key_str = str(key).upper()
+         if key_str.endswith("_N_JOBS"):
+             key_str = key_str[: -len("_N_JOBS")]
+         normalized[key_str] = value
+     return normalized
+
+
+ def _coerce_jobs_value(raw: Any) -> int | None:
+     if raw is None:
+         return None
+     try:
+         return int(raw)
+     except (TypeError, ValueError):
+         return None
+
+
+ def _sanitize_jobs(value: int | None, *, allow_non_positive: bool) -> int | None:
+     if value is None:
+         return None
+     if not allow_non_positive and value <= 0:
+         return None
+     return value
+
+
+ def _resolve_jobs_override(
+     overrides: Mapping[str, Any],
+     key: str,
+     graph_value: Any,
+     *,
+     allow_non_positive: bool,
+ ) -> int | None:
+     norm_key = key.upper()
+     if overrides and norm_key in overrides:
+         return _sanitize_jobs(
+             _coerce_jobs_value(overrides.get(norm_key)),
+             allow_non_positive=allow_non_positive,
+         )
+
+     return _sanitize_jobs(
+         _coerce_jobs_value(graph_value),
+         allow_non_positive=allow_non_positive,
+     )
+
+
+ _INTEGRATOR_CACHE_KEY = "_integrator_cache"
+
+
+ def _call_integrator_factory(factory: Any, G: TNFRGraph) -> Any:
+     """Invoke an integrator factory respecting optional graph injection."""
+
+     try:
+         signature = inspect.signature(factory)
+     except (TypeError, ValueError):
+         return factory()
+
+     params = list(signature.parameters.values())
+     required = [
+         p
+         for p in params
+         if p.default is inspect._empty
+         and p.kind
+         in (
+             inspect.Parameter.POSITIONAL_ONLY,
+             inspect.Parameter.POSITIONAL_OR_KEYWORD,
+             inspect.Parameter.KEYWORD_ONLY,
+         )
+     ]
+
+     if any(p.kind is inspect.Parameter.KEYWORD_ONLY for p in required):
+         raise TypeError(
+             "Integrator factory cannot require keyword-only arguments",
+         )
+
+     positional_required = [
+         p
+         for p in required
+         if p.kind
+         in (
+             inspect.Parameter.POSITIONAL_ONLY,
+             inspect.Parameter.POSITIONAL_OR_KEYWORD,
+         )
+     ]
+     if len(positional_required) > 1:
+         raise TypeError(
+             "Integrator factory must accept at most one positional argument",
+         )
+
+     if positional_required:
+         return factory(G)
+
+     positional = [
+         p
+         for p in params
+         if p.kind
+         in (
+             inspect.Parameter.POSITIONAL_ONLY,
+             inspect.Parameter.POSITIONAL_OR_KEYWORD,
+         )
+     ]
+     if positional:
+         return factory(G)
+
+     return factory()
+
+
+ def _resolve_integrator_instance(G: TNFRGraph) -> integrators.AbstractIntegrator:
+     """Return an integrator instance configured on ``G`` or a default."""
+
+     cache_entry = G.graph.get(_INTEGRATOR_CACHE_KEY)
+     candidate = G.graph.get("integrator")
+     if (
+         isinstance(cache_entry, tuple)
+         and len(cache_entry) == 2
+         and cache_entry[0] is candidate
+         and isinstance(cache_entry[1], integrators.AbstractIntegrator)
+     ):
+         return cache_entry[1]
+
+     if isinstance(candidate, integrators.AbstractIntegrator):
+         instance = candidate
+     elif inspect.isclass(candidate) and issubclass(
+         candidate, integrators.AbstractIntegrator
+     ):
+         instance = candidate()
+     elif callable(candidate):
+         instance = cast(
+             integrators.AbstractIntegrator,
+             _call_integrator_factory(candidate, G),
+         )
+     elif candidate is None:
+         instance = integrators.DefaultIntegrator()
+     else:
+         raise TypeError(
+             "Graph integrator must be an AbstractIntegrator, subclass or callable",
+         )
+
+     if not isinstance(instance, integrators.AbstractIntegrator):
+         raise TypeError(
+             "Configured integrator must implement AbstractIntegrator.integrate",
+         )
+
+     G.graph[_INTEGRATOR_CACHE_KEY] = (candidate, instance)
+     return instance
+
+
+ def apply_canonical_clamps(
+     nd: MutableMapping[str, Any],
+     G: TNFRGraph | None = None,
+     node: NodeId | None = None,
+ ) -> None:
+     if G is not None:
+         graph_dict = cast(MutableMapping[str, Any], G.graph)
+         graph_data: Mapping[str, Any] = graph_dict
+     else:
+         graph_dict = None
+         graph_data = DEFAULTS
+     eps_min = float(graph_data.get("EPI_MIN", DEFAULTS["EPI_MIN"]))
+     eps_max = float(graph_data.get("EPI_MAX", DEFAULTS["EPI_MAX"]))
+     vf_min = float(graph_data.get("VF_MIN", DEFAULTS["VF_MIN"]))
+     vf_max = float(graph_data.get("VF_MAX", DEFAULTS["VF_MAX"]))
+     theta_wrap = bool(graph_data.get("THETA_WRAP", DEFAULTS["THETA_WRAP"]))
+
+     epi = cast(float, get_attr(nd, ALIAS_EPI, 0.0))
+     vf = get_attr(nd, ALIAS_VF, 0.0)
+     th_val = get_theta_attr(nd, 0.0)
+     th = 0.0 if th_val is None else float(th_val)
+
+     strict = bool(
+         graph_data.get("VALIDATORS_STRICT", DEFAULTS.get("VALIDATORS_STRICT", False))
+     )
+     if strict and graph_dict is not None:
+         history = cast(MutableMapping[str, Any], graph_dict.setdefault("history", {}))
+         hist = cast(
+             HistoryLog,
+             history.setdefault("clamp_alerts", []),
+         )
+         _log_clamp(hist, node, "EPI", float(epi), eps_min, eps_max)
+         _log_clamp(hist, node, "VF", float(vf), vf_min, vf_max)
+
+     set_attr(nd, ALIAS_EPI, clamp(epi, eps_min, eps_max))
+
+     vf_val = float(clamp(vf, vf_min, vf_max))
+     if G is not None and node is not None:
+         set_vf(G, node, vf_val, update_max=False)
+     else:
+         set_attr(nd, ALIAS_VF, vf_val)
+
+     if theta_wrap:
+         new_th = (th + math.pi) % (2 * math.pi) - math.pi
+         if G is not None and node is not None:
+             set_theta(G, node, new_th)
+         else:
+             set_theta_attr(nd, new_th)
+
+
+ def validate_canon(G: TNFRGraph) -> TNFRGraph:
+     for n, nd in G.nodes(data=True):
+         apply_canonical_clamps(cast(MutableMapping[str, Any], nd), G, cast(NodeId, n))
+     maxes = multi_recompute_abs_max(G, {"_vfmax": ALIAS_VF})
+     G.graph.update(maxes)
+     return G
+
+
+ def _run_before_callbacks(
+     G: TNFRGraph,
+     *,
+     step_idx: int,
+     dt: float | None,
+     use_Si: bool,
+     apply_glyphs: bool,
+ ) -> None:
+     callback_manager.invoke_callbacks(
+         G,
+         CallbackEvent.BEFORE_STEP.value,
+         {
+             "step": step_idx,
+             "dt": dt,
+             "use_Si": use_Si,
+             "apply_glyphs": apply_glyphs,
+         },
+     )
+
+
+ def _prepare_dnfr(
+     G: TNFRGraph,
+     *,
+     use_Si: bool,
+     job_overrides: Mapping[str, Any] | None = None,
+ ) -> None:
+     compute_dnfr_cb = G.graph.get(
+         "compute_delta_nfr", default_compute_delta_nfr
+     )
+     overrides = job_overrides or {}
+     n_jobs = _resolve_jobs_override(
+         overrides,
+         "DNFR",
+         G.graph.get("DNFR_N_JOBS"),
+         allow_non_positive=False,
+     )
+
+     supports_n_jobs = False
+     try:
+         signature = inspect.signature(compute_dnfr_cb)
+     except (TypeError, ValueError):
+         signature = None
+     if signature is not None:
+         params = signature.parameters
+         if "n_jobs" in params:
+             kind = params["n_jobs"].kind
+             supports_n_jobs = kind in (
+                 inspect.Parameter.POSITIONAL_OR_KEYWORD,
+                 inspect.Parameter.KEYWORD_ONLY,
+             )
+         elif any(
+             p.kind == inspect.Parameter.VAR_KEYWORD for p in params.values()
+         ):
+             supports_n_jobs = True
+
+     if supports_n_jobs:
+         compute_dnfr_cb(G, n_jobs=n_jobs)
+     else:
+         try:
+             compute_dnfr_cb(G, n_jobs=n_jobs)
+         except TypeError as exc:
+             if "n_jobs" in str(exc):
+                 compute_dnfr_cb(G)
+             else:
+                 raise
+     G.graph.pop("_sel_norms", None)
+     if use_Si:
+         si_jobs = _resolve_jobs_override(
+             overrides,
+             "SI",
+             G.graph.get("SI_N_JOBS"),
+             allow_non_positive=False,
+         )
+         dynamics_module = sys.modules.get("tnfr.dynamics")
+         compute_si_fn = (
+             getattr(dynamics_module, "compute_Si", None)
+             if dynamics_module is not None
+             else None
+         )
+         if compute_si_fn is None:
+             compute_si_fn = compute_Si
+         compute_si_fn(G, inplace=True, n_jobs=si_jobs)
+
+
+ def _update_nodes(
+     G: TNFRGraph,
+     *,
+     dt: float | None,
+     use_Si: bool,
+     apply_glyphs: bool,
+     step_idx: int,
+     hist: HistoryState,
+     job_overrides: Mapping[str, Any] | None = None,
+ ) -> None:
+     _update_node_sample(G, step=step_idx)
+     overrides = job_overrides or {}
+     _prepare_dnfr(G, use_Si=use_Si, job_overrides=overrides)
+     selector = selectors._apply_selector(G)
+     if apply_glyphs:
+         selectors._apply_glyphs(G, selector, hist)
+     _dt = get_graph_param(G, "DT") if dt is None else float(dt)
+     method = get_graph_param(G, "INTEGRATOR_METHOD", str)
+     n_jobs = _resolve_jobs_override(
+         overrides,
+         "INTEGRATOR",
+         G.graph.get("INTEGRATOR_N_JOBS"),
+         allow_non_positive=True,
+     )
+     integrator = _resolve_integrator_instance(G)
+     integrator.integrate(
+         G,
+         dt=_dt,
+         t=None,
+         method=cast(str | None, method),
+         n_jobs=n_jobs,
+     )
+     for n, nd in G.nodes(data=True):
+         apply_canonical_clamps(
+             cast(MutableMapping[str, Any], nd), G, cast(NodeId, n)
+         )
+     phase_jobs = _resolve_jobs_override(
+         overrides,
+         "PHASE",
+         G.graph.get("PHASE_N_JOBS"),
+         allow_non_positive=True,
+     )
+     coordination.coordinate_global_local_phase(G, None, None, n_jobs=phase_jobs)
+     vf_jobs = _resolve_jobs_override(
+         overrides,
+         "VF_ADAPT",
+         G.graph.get("VF_ADAPT_N_JOBS"),
+         allow_non_positive=False,
+     )
+     adaptation.adapt_vf_by_coherence(G, n_jobs=vf_jobs)
+
+
+ def _update_epi_hist(G: TNFRGraph) -> None:
+     tau_g = int(get_param(G, "REMESH_TAU_GLOBAL"))
+     tau_l = int(get_param(G, "REMESH_TAU_LOCAL"))
+     tau = max(tau_g, tau_l)
+     maxlen = max(2 * tau + 5, 64)
+     epi_hist = G.graph.get("_epi_hist")
+     if not isinstance(epi_hist, deque) or epi_hist.maxlen != maxlen:
+         epi_hist = deque(list(epi_hist or [])[-maxlen:], maxlen=maxlen)
+         G.graph["_epi_hist"] = epi_hist
+     epi_hist.append(
+         {n: get_attr(nd, ALIAS_EPI, 0.0) for n, nd in G.nodes(data=True)}
+     )
+
+
+ def _maybe_remesh(G: TNFRGraph) -> None:
+     apply_remesh_if_globally_stable(G)
+
+
+ def _run_validators(G: TNFRGraph) -> None:
+     from ..utils import run_validators
+
+     run_validators(G)
+
+
+ def _run_after_callbacks(G, *, step_idx: int) -> None:
+     h = ensure_history(G)
+     ctx = {"step": step_idx}
+     metric_pairs = [
+         ("C", "C_steps"),
+         ("stable_frac", "stable_frac"),
+         ("phase_sync", "phase_sync"),
+         ("glyph_disr", "glyph_load_disr"),
+         ("Si_mean", "Si_mean"),
+     ]
+     for dst, src in metric_pairs:
+         values = h.get(src)
+         if values:
+             ctx[dst] = values[-1]
+     callback_manager.invoke_callbacks(G, CallbackEvent.AFTER_STEP.value, ctx)
+
+
+ def step(
+     G: TNFRGraph,
+     *,
+     dt: float | None = None,
+     use_Si: bool = True,
+     apply_glyphs: bool = True,
+     n_jobs: Mapping[str, Any] | None = None,
+ ) -> None:
+     job_overrides = _normalize_job_overrides(n_jobs)
+     hist = ensure_history(G)
+     step_idx = len(hist.setdefault("C_steps", []))
+     _run_before_callbacks(
+         G, step_idx=step_idx, dt=dt, use_Si=use_Si, apply_glyphs=apply_glyphs
+     )
+     _update_nodes(
+         G,
+         dt=dt,
+         use_Si=use_Si,
+         apply_glyphs=apply_glyphs,
+         step_idx=step_idx,
+         hist=hist,
+         job_overrides=job_overrides,
+     )
+     _update_epi_hist(G)
+     _maybe_remesh(G)
+     _run_validators(G)
+     _run_after_callbacks(G, step_idx=step_idx)
+
+
+ def run(
+     G: TNFRGraph,
+     steps: int,
+     *,
+     dt: float | None = None,
+     use_Si: bool = True,
+     apply_glyphs: bool = True,
+     n_jobs: Mapping[str, Any] | None = None,
+ ) -> None:
+     steps_int = int(steps)
+     if steps_int < 0:
+         raise ValueError("'steps' must be non-negative")
+     stop_cfg = get_graph_param(G, "STOP_EARLY", dict)
+     stop_enabled = False
+     if stop_cfg and stop_cfg.get("enabled", False):
+         w = int(stop_cfg.get("window", 25))
+         frac = float(stop_cfg.get("fraction", 0.90))
+         stop_enabled = True
+     job_overrides = _normalize_job_overrides(n_jobs)
+     for _ in range(steps_int):
+         step(
+             G,
+             dt=dt,
+             use_Si=use_Si,
+             apply_glyphs=apply_glyphs,
+             n_jobs=job_overrides,
+         )
+         if stop_enabled:
+             history = ensure_history(G)
+             series = history.get("stable_frac", [])
+             if not isinstance(series, list):
+                 series = list(series)
+             if len(series) >= w and all(v >= frac for v in series[-w:]):
+                 break
+
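A minimal usage sketch of the new `step`/`run` orchestration and the per-stage `n_jobs` overrides, based on the signatures above. The graph here is a bare placeholder; real runs assume a graph whose nodes already carry TNFR attributes (EPI, VF, theta), which this diff does not show how to initialise.

```python
# Sketch only: G must be a TNFR-prepared graph; nx.Graph() merely illustrates the call shape.
import networkx as nx

from tnfr.dynamics.runtime import _normalize_job_overrides, run, step

# Override keys are upper-cased and a trailing "_N_JOBS" suffix is stripped,
# so both spellings below target the DNFR and Si stages.
overrides = {"DNFR_N_JOBS": 4, "si": 2}
assert _normalize_job_overrides(overrides) == {"DNFR": 4, "SI": 2}

G = nx.Graph()  # placeholder; populate with TNFR node attributes before real use
step(G, dt=0.1, n_jobs=overrides)   # one orchestrated step with worker overrides
run(G, steps=10, n_jobs=overrides)  # fixed-length run; STOP_EARLY config can end it sooner
```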
tnfr/dynamics/sampling.py CHANGED
@@ -1,26 +1,29 @@
  from __future__ import annotations

- from ..cache import cached_node_list
+ from typing import cast
+
  from ..rng import _rng_for_step, base_seed
+ from ..types import NodeId, TNFRGraph
+ from ..utils import cached_node_list

  __all__ = ("update_node_sample",)


- def update_node_sample(G, *, step: int) -> None:
+ def update_node_sample(G: TNFRGraph, *, step: int) -> None:
      """Refresh ``G.graph['_node_sample']`` with a random subset of nodes.

      The sample is limited by ``UM_CANDIDATE_COUNT`` and refreshed every
      simulation step. When the network is small (``< 50`` nodes) or the limit
      is non‑positive, the full node set is used and sampling is effectively
-     disabled. A snapshot of nodes is cached via a
-     :class:`~tnfr.cache.NodeCache` instance stored in
+     disabled. A snapshot of nodes is cached via the NodeCache helper from
+     ``tnfr.utils`` stored in
      ``G.graph['_node_list_cache']`` and reused across steps; it is only refreshed
      when the graph size changes. Sampling operates directly on the cached
      tuple of nodes.
      """
      graph = G.graph
      limit = int(graph.get("UM_CANDIDATE_COUNT", 0))
-     nodes = cached_node_list(G)
+     nodes = cast(tuple[NodeId, ...], cached_node_list(G))
      current_n = len(nodes)
      if limit <= 0 or current_n < 50 or limit >= current_n:
          graph["_node_sample"] = nodes
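A short sketch of the sampling behaviour described in the docstring above: with fewer than 50 nodes (or a non-positive limit) the full cached node tuple is stored in `G.graph['_node_sample']`, so `UM_CANDIDATE_COUNT` only matters for larger networks. Passing a plain `networkx` graph here is an assumption.

```python
# Sketch, assuming a plain networkx graph satisfies TNFRGraph for this helper.
import networkx as nx

from tnfr.dynamics.sampling import update_node_sample

G = nx.path_graph(10)
G.graph["UM_CANDIDATE_COUNT"] = 5  # below 50 nodes the limit is ignored

update_node_sample(G, step=0)
print(len(G.graph["_node_sample"]))  # expected: 10 (the full cached node tuple)
```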
tnfr/dynamics/sampling.pyi ADDED
@@ -0,0 +1,7 @@
+ from typing import Any
+
+ __all__: Any
+
+ def __getattr__(name: str) -> Any: ...
+
+ update_node_sample: Any