tnfr 6.0.0-py3-none-any.whl → 7.0.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.



Files changed (176)
  1. tnfr/__init__.py +50 -5
  2. tnfr/__init__.pyi +0 -7
  3. tnfr/_compat.py +0 -1
  4. tnfr/_generated_version.py +34 -0
  5. tnfr/_version.py +44 -2
  6. tnfr/alias.py +14 -13
  7. tnfr/alias.pyi +5 -37
  8. tnfr/cache.py +9 -729
  9. tnfr/cache.pyi +8 -224
  10. tnfr/callback_utils.py +16 -31
  11. tnfr/callback_utils.pyi +3 -29
  12. tnfr/cli/__init__.py +17 -11
  13. tnfr/cli/__init__.pyi +0 -21
  14. tnfr/cli/arguments.py +175 -14
  15. tnfr/cli/arguments.pyi +5 -11
  16. tnfr/cli/execution.py +434 -48
  17. tnfr/cli/execution.pyi +14 -24
  18. tnfr/cli/utils.py +20 -3
  19. tnfr/cli/utils.pyi +5 -5
  20. tnfr/config/__init__.py +2 -1
  21. tnfr/config/__init__.pyi +2 -0
  22. tnfr/config/feature_flags.py +83 -0
  23. tnfr/config/init.py +1 -1
  24. tnfr/config/operator_names.py +1 -14
  25. tnfr/config/presets.py +6 -26
  26. tnfr/constants/__init__.py +10 -13
  27. tnfr/constants/__init__.pyi +10 -22
  28. tnfr/constants/aliases.py +31 -0
  29. tnfr/constants/core.py +4 -3
  30. tnfr/constants/init.py +1 -1
  31. tnfr/constants/metric.py +3 -3
  32. tnfr/dynamics/__init__.py +64 -10
  33. tnfr/dynamics/__init__.pyi +3 -4
  34. tnfr/dynamics/adaptation.py +79 -13
  35. tnfr/dynamics/aliases.py +10 -9
  36. tnfr/dynamics/coordination.py +77 -35
  37. tnfr/dynamics/dnfr.py +575 -274
  38. tnfr/dynamics/dnfr.pyi +1 -10
  39. tnfr/dynamics/integrators.py +47 -33
  40. tnfr/dynamics/integrators.pyi +0 -1
  41. tnfr/dynamics/runtime.py +489 -129
  42. tnfr/dynamics/sampling.py +2 -0
  43. tnfr/dynamics/selectors.py +101 -62
  44. tnfr/execution.py +15 -8
  45. tnfr/execution.pyi +5 -25
  46. tnfr/flatten.py +7 -3
  47. tnfr/flatten.pyi +1 -8
  48. tnfr/gamma.py +22 -26
  49. tnfr/gamma.pyi +0 -6
  50. tnfr/glyph_history.py +37 -26
  51. tnfr/glyph_history.pyi +1 -19
  52. tnfr/glyph_runtime.py +16 -0
  53. tnfr/glyph_runtime.pyi +9 -0
  54. tnfr/immutable.py +20 -15
  55. tnfr/immutable.pyi +4 -7
  56. tnfr/initialization.py +5 -7
  57. tnfr/initialization.pyi +1 -9
  58. tnfr/io.py +6 -305
  59. tnfr/io.pyi +13 -8
  60. tnfr/mathematics/__init__.py +81 -0
  61. tnfr/mathematics/backend.py +426 -0
  62. tnfr/mathematics/dynamics.py +398 -0
  63. tnfr/mathematics/epi.py +254 -0
  64. tnfr/mathematics/generators.py +222 -0
  65. tnfr/mathematics/metrics.py +119 -0
  66. tnfr/mathematics/operators.py +233 -0
  67. tnfr/mathematics/operators_factory.py +71 -0
  68. tnfr/mathematics/projection.py +78 -0
  69. tnfr/mathematics/runtime.py +173 -0
  70. tnfr/mathematics/spaces.py +247 -0
  71. tnfr/mathematics/transforms.py +292 -0
  72. tnfr/metrics/__init__.py +10 -10
  73. tnfr/metrics/coherence.py +123 -94
  74. tnfr/metrics/common.py +22 -13
  75. tnfr/metrics/common.pyi +42 -11
  76. tnfr/metrics/core.py +72 -14
  77. tnfr/metrics/diagnosis.py +48 -57
  78. tnfr/metrics/diagnosis.pyi +3 -7
  79. tnfr/metrics/export.py +3 -5
  80. tnfr/metrics/glyph_timing.py +41 -31
  81. tnfr/metrics/reporting.py +13 -6
  82. tnfr/metrics/sense_index.py +884 -114
  83. tnfr/metrics/trig.py +167 -11
  84. tnfr/metrics/trig.pyi +1 -0
  85. tnfr/metrics/trig_cache.py +112 -15
  86. tnfr/node.py +400 -17
  87. tnfr/node.pyi +55 -38
  88. tnfr/observers.py +111 -8
  89. tnfr/observers.pyi +0 -15
  90. tnfr/ontosim.py +9 -6
  91. tnfr/ontosim.pyi +0 -5
  92. tnfr/operators/__init__.py +529 -42
  93. tnfr/operators/__init__.pyi +14 -0
  94. tnfr/operators/definitions.py +350 -18
  95. tnfr/operators/definitions.pyi +0 -14
  96. tnfr/operators/grammar.py +760 -0
  97. tnfr/operators/jitter.py +28 -22
  98. tnfr/operators/registry.py +7 -12
  99. tnfr/operators/registry.pyi +0 -2
  100. tnfr/operators/remesh.py +38 -61
  101. tnfr/rng.py +17 -300
  102. tnfr/schemas/__init__.py +8 -0
  103. tnfr/schemas/grammar.json +94 -0
  104. tnfr/selector.py +3 -4
  105. tnfr/selector.pyi +1 -1
  106. tnfr/sense.py +22 -24
  107. tnfr/sense.pyi +0 -7
  108. tnfr/structural.py +504 -21
  109. tnfr/structural.pyi +41 -18
  110. tnfr/telemetry/__init__.py +23 -1
  111. tnfr/telemetry/cache_metrics.py +226 -0
  112. tnfr/telemetry/nu_f.py +423 -0
  113. tnfr/telemetry/nu_f.pyi +123 -0
  114. tnfr/tokens.py +1 -4
  115. tnfr/tokens.pyi +1 -6
  116. tnfr/trace.py +20 -53
  117. tnfr/trace.pyi +9 -37
  118. tnfr/types.py +244 -15
  119. tnfr/types.pyi +200 -14
  120. tnfr/units.py +69 -0
  121. tnfr/units.pyi +16 -0
  122. tnfr/utils/__init__.py +107 -48
  123. tnfr/utils/__init__.pyi +80 -11
  124. tnfr/utils/cache.py +1705 -65
  125. tnfr/utils/cache.pyi +370 -58
  126. tnfr/utils/chunks.py +104 -0
  127. tnfr/utils/chunks.pyi +21 -0
  128. tnfr/utils/data.py +95 -5
  129. tnfr/utils/data.pyi +8 -17
  130. tnfr/utils/graph.py +2 -4
  131. tnfr/utils/init.py +31 -7
  132. tnfr/utils/init.pyi +4 -11
  133. tnfr/utils/io.py +313 -14
  134. tnfr/{helpers → utils}/numeric.py +50 -24
  135. tnfr/utils/numeric.pyi +21 -0
  136. tnfr/validation/__init__.py +92 -4
  137. tnfr/validation/__init__.pyi +77 -17
  138. tnfr/validation/compatibility.py +79 -43
  139. tnfr/validation/compatibility.pyi +4 -6
  140. tnfr/validation/grammar.py +55 -133
  141. tnfr/validation/grammar.pyi +37 -8
  142. tnfr/validation/graph.py +138 -0
  143. tnfr/validation/graph.pyi +17 -0
  144. tnfr/validation/rules.py +161 -74
  145. tnfr/validation/rules.pyi +55 -18
  146. tnfr/validation/runtime.py +263 -0
  147. tnfr/validation/runtime.pyi +31 -0
  148. tnfr/validation/soft_filters.py +170 -0
  149. tnfr/validation/soft_filters.pyi +37 -0
  150. tnfr/validation/spectral.py +159 -0
  151. tnfr/validation/spectral.pyi +46 -0
  152. tnfr/validation/syntax.py +28 -139
  153. tnfr/validation/syntax.pyi +7 -4
  154. tnfr/validation/window.py +39 -0
  155. tnfr/validation/window.pyi +1 -0
  156. tnfr/viz/__init__.py +9 -0
  157. tnfr/viz/matplotlib.py +246 -0
  158. {tnfr-6.0.0.dist-info → tnfr-7.0.0.dist-info}/METADATA +63 -19
  159. tnfr-7.0.0.dist-info/RECORD +185 -0
  160. {tnfr-6.0.0.dist-info → tnfr-7.0.0.dist-info}/licenses/LICENSE.md +1 -1
  161. tnfr/constants_glyphs.py +0 -16
  162. tnfr/constants_glyphs.pyi +0 -12
  163. tnfr/grammar.py +0 -25
  164. tnfr/grammar.pyi +0 -13
  165. tnfr/helpers/__init__.py +0 -151
  166. tnfr/helpers/__init__.pyi +0 -66
  167. tnfr/helpers/numeric.pyi +0 -12
  168. tnfr/presets.py +0 -15
  169. tnfr/presets.pyi +0 -7
  170. tnfr/utils/io.pyi +0 -10
  171. tnfr/utils/validators.py +0 -130
  172. tnfr/utils/validators.pyi +0 -19
  173. tnfr-6.0.0.dist-info/RECORD +0 -157
  174. {tnfr-6.0.0.dist-info → tnfr-7.0.0.dist-info}/WHEEL +0 -0
  175. {tnfr-6.0.0.dist-info → tnfr-7.0.0.dist-info}/entry_points.txt +0 -0
  176. {tnfr-6.0.0.dist-info → tnfr-7.0.0.dist-info}/top_level.txt +0 -0
tnfr/operators/jitter.py CHANGED
@@ -1,27 +1,31 @@
-from __future__ import annotations
-import threading
+"""Jitter operators for reproducible phase perturbations."""
 
-from typing import Any, TYPE_CHECKING, cast
+from __future__ import annotations
 
-from cachetools import LRUCache
+import threading
+from typing import TYPE_CHECKING, Any, cast
 
+from ..rng import base_seed, cache_enabled
+from ..rng import clear_rng_cache as _clear_rng_cache
 from ..rng import (
-    ScopedCounterCache,
     make_rng,
-    base_seed,
-    cache_enabled,
-    clear_rng_cache as _clear_rng_cache,
     seed_hash,
 )
-from ..cache import CacheManager
-from ..utils import ensure_node_offset_map, get_nodenx
 from ..types import NodeId, TNFRGraph
+from ..utils import (
+    CacheManager,
+    InstrumentedLRUCache,
+    ScopedCounterCache,
+    build_cache_manager,
+    ensure_node_offset_map,
+    get_nodenx,
+)
 
 if TYPE_CHECKING:  # pragma: no cover - type checking only
     from ..node import NodeProtocol
 
 # Guarded by the cache lock to ensure thread-safe access. ``seq`` stores
-# per-scope jitter sequence counters in an LRU cache bounded to avoid
+# per-scope jitter sequence counters in an instrumented LRU cache bounded to avoid
 # unbounded memory usage.
 _JITTER_MAX_ENTRIES = 1024
 
@@ -35,7 +39,7 @@ class JitterCache:
         *,
         manager: CacheManager | None = None,
     ) -> None:
-        self._manager = manager or CacheManager()
+        self._manager = manager or build_cache_manager()
         if not self._manager.has_override("scoped_counter:jitter"):
             self._manager.configure(
                 overrides={"scoped_counter:jitter": int(max_entries)}
@@ -69,8 +73,8 @@ class JitterCache:
         return self._manager
 
     @property
-    def seq(self) -> LRUCache[tuple[int, int], int]:
-        """Expose the sequence cache for tests and diagnostics."""
+    def seq(self) -> InstrumentedLRUCache[tuple[int, int], int]:
+        """Expose the instrumented sequence cache for tests and diagnostics."""
 
         return self._sequence.cache
 
@@ -99,9 +103,7 @@ class JitterCache:
 
         return cast(dict[str, Any], self._manager.get(self._settings_key))
 
-    def setup(
-        self, force: bool = False, max_entries: int | None = None
-    ) -> None:
+    def setup(self, force: bool = False, max_entries: int | None = None) -> None:
        """Ensure jitter cache matches the configured size."""
 
        self._sequence.configure(force=force, max_entries=max_entries)
@@ -133,20 +135,26 @@ class JitterCacheManager:
             self.cache = cache
             self._manager = cache.manager
         else:
-            self._manager = manager or CacheManager()
+            self._manager = manager or build_cache_manager()
             self.cache = JitterCache(manager=self._manager)
 
     # Convenience passthrough properties
     @property
-    def seq(self) -> LRUCache[tuple[int, int], int]:
+    def seq(self) -> InstrumentedLRUCache[tuple[int, int], int]:
+        """Expose the underlying instrumented jitter sequence cache."""
+
         return self.cache.seq
 
     @property
     def settings(self) -> dict[str, Any]:
+        """Return persisted jitter cache configuration."""
+
         return self.cache.settings
 
     @property
     def lock(self) -> threading.Lock | threading.RLock:
+        """Return the lock associated with the jitter cache."""
+
         return self.cache.lock
 
     @property
@@ -161,9 +169,7 @@
 
        self.cache.max_entries = value
 
-    def setup(
-        self, force: bool = False, max_entries: int | None = None
-    ) -> None:
+    def setup(self, force: bool = False, max_entries: int | None = None) -> None:
        """Ensure jitter cache matches the configured size.
 
        ``max_entries`` may be provided to explicitly resize the cache.
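The comment retained above describes what ``seq`` holds: per-scope jitter sequence counters kept in a bounded, instrumented LRU cache guarded by a lock. A minimal standalone sketch of that idea using only the standard library (the class and method names below are illustrative, not tnfr's ``InstrumentedLRUCache`` API):

import threading
from collections import OrderedDict


class BoundedCounterCache:
    """Illustrative LRU of per-scope counters with basic hit/miss instrumentation."""

    def __init__(self, max_entries: int = 1024) -> None:
        self._data: OrderedDict[tuple[int, int], int] = OrderedDict()
        self._max_entries = max_entries
        self._lock = threading.Lock()
        self.hits = 0
        self.misses = 0

    def next_in_sequence(self, scope: tuple[int, int]) -> int:
        """Return and advance the counter for ``scope``; thread-safe and bounded."""
        with self._lock:
            if scope in self._data:
                self.hits += 1
                self._data.move_to_end(scope)
            else:
                self.misses += 1
                self._data[scope] = 0
                if len(self._data) > self._max_entries:
                    self._data.popitem(last=False)  # evict least recently used scope
            value = self._data[scope]
            self._data[scope] = value + 1
            return value


cache = BoundedCounterCache(max_entries=1024)
assert cache.next_in_sequence((7, 3)) == 0
assert cache.next_in_sequence((7, 3)) == 1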
tnfr/operators/registry.py CHANGED
@@ -4,7 +4,7 @@ from __future__ import annotations
 
 import importlib
 import pkgutil
-from typing import Any, TYPE_CHECKING
+from typing import TYPE_CHECKING
 
 from ..config.operator_names import canonical_operator_name
 
@@ -67,14 +67,9 @@ def discover_operators() -> None:
     setattr(package, "_operators_discovered", True)
 
 
-__all__ = ("OPERATORS", "register_operator", "discover_operators", "get_operator_class")
-
-
-def __getattr__(name: str) -> Any:
-    """Provide guidance for legacy registry aliases."""
-
-    if name == "OPERADORES":
-        raise AttributeError(
-            f"module '{__name__}' has no attribute '{name}'; use 'OPERATORS' instead."
-        )
-    raise AttributeError(f"module '{__name__}' has no attribute '{name}'")
+__all__ = (
+    "OPERATORS",
+    "register_operator",
+    "discover_operators",
+    "get_operator_class",
+)
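For orientation, the names kept in ``__all__`` describe a conventional decorator-based registry: operators register themselves into ``OPERATORS`` and ``discover_operators`` imports submodules so the decorators run. A rough sketch of that pattern (the lowercase-name keying and the ``package_name`` argument are assumptions for illustration, not the actual tnfr implementation):

import importlib
import pkgutil

OPERATORS: dict[str, type] = {}


def register_operator(cls: type) -> type:
    """Class decorator: record ``cls`` in the registry under a canonical key."""
    OPERATORS[cls.__name__.lower()] = cls
    return cls


def get_operator_class(name: str) -> type:
    """Look up a registered operator class by name."""
    try:
        return OPERATORS[name]
    except KeyError:
        raise KeyError(f"unknown operator: {name!r}") from None


def discover_operators(package_name: str = "tnfr.operators") -> None:
    """Import every submodule of the package so decorated operators self-register."""
    package = importlib.import_module(package_name)
    for info in pkgutil.iter_modules(package.__path__):
        importlib.import_module(f"{package_name}.{info.name}")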
tnfr/operators/registry.pyi CHANGED
@@ -9,7 +9,5 @@ def __getattr__(name: str) -> Any: ...
 OPERATORS: dict[str, type[Operator]]
 
 def discover_operators() -> None: ...
-
 def register_operator(cls: type[Operator]) -> type[Operator]: ...
-
 def get_operator_class(name: str) -> type[Operator]: ...
tnfr/operators/remesh.py CHANGED
@@ -1,51 +1,36 @@
+"""Adaptive remeshing operators preserving TNFR structural coherence."""
+
 from __future__ import annotations
 
 import hashlib
 import heapq
 import random
-from operator import ge, le
-from functools import cache
-from itertools import combinations
-from io import StringIO
 from collections import deque
 from collections.abc import Hashable, Iterable, Mapping, MutableMapping, Sequence
-from statistics import fmean, StatisticsError
+from functools import cache
+from io import StringIO
+from itertools import combinations
+from operator import ge, le
+from statistics import StatisticsError, fmean
 from types import ModuleType
-from typing import Any, TypedDict, cast
+from typing import TYPE_CHECKING, Any, cast
 
 from .._compat import TypeAlias
-
-from ..constants import DEFAULTS, REMESH_DEFAULTS, get_aliases, get_param
-from ..helpers.numeric import kahan_sum_nd
 from ..alias import get_attr, set_attr
+from ..constants import DEFAULTS, REMESH_DEFAULTS, get_param
+from ..constants.aliases import ALIAS_EPI
 from ..rng import make_rng
-from ..callback_utils import CallbackEvent, callback_manager
-from ..glyph_history import append_metric, ensure_history, current_step_idx
-from ..utils import cached_import, edge_version_update
+from ..types import RemeshMeta
+from ..utils import cached_import, edge_version_update, kahan_sum_nd
+
+if TYPE_CHECKING:  # pragma: no cover - type checking only
+    from ..callback_utils import CallbackEvent, CallbackManager
 
 CommunityGraph: TypeAlias = Any
 NetworkxModule: TypeAlias = ModuleType
 CommunityModule: TypeAlias = ModuleType
 RemeshEdge: TypeAlias = tuple[Hashable, Hashable]
 NetworkxModules: TypeAlias = tuple[NetworkxModule, CommunityModule]
-
-
-class RemeshMeta(TypedDict, total=False):
-    alpha: float
-    alpha_source: str
-    tau_global: int
-    tau_local: int
-    step: int | None
-    topo_hash: str | None
-    epi_mean_before: float
-    epi_mean_after: float
-    epi_checksum_before: str
-    epi_checksum_after: str
-    stable_frac_last: float
-    phase_sync_last: float
-    glyph_disr_last: float
-
-
 RemeshConfigValue: TypeAlias = bool | float | int
 
 
@@ -66,9 +51,6 @@ def _ordered_edge(u: Hashable, v: Hashable) -> RemeshEdge:
     return (u, v) if repr(u) <= repr(v) else (v, u)
 
 
-ALIAS_EPI = get_aliases("EPI")
-
-
 COOLDOWN_KEY = "REMESH_COOLDOWN_WINDOW"
 
 
@@ -91,9 +73,7 @@ def _get_networkx_modules() -> NetworkxModules:
 
 def _remesh_alpha_info(G: CommunityGraph) -> tuple[float, str]:
     """Return ``(alpha, source)`` with explicit precedence."""
-    if bool(
-        G.graph.get("REMESH_ALPHA_HARD", REMESH_DEFAULTS["REMESH_ALPHA_HARD"])
-    ):
+    if bool(G.graph.get("REMESH_ALPHA_HARD", REMESH_DEFAULTS["REMESH_ALPHA_HARD"])):
         val = _as_float(
             G.graph.get("REMESH_ALPHA", REMESH_DEFAULTS["REMESH_ALPHA"]),
             float(REMESH_DEFAULTS["REMESH_ALPHA"]),
@@ -138,17 +118,19 @@ def _snapshot_epi(G: CommunityGraph) -> tuple[float, str]:
 
 def _log_remesh_event(G: CommunityGraph, meta: RemeshMeta) -> None:
     """Store remesh metadata and optionally log and trigger callbacks."""
+    from ..callback_utils import CallbackEvent, callback_manager
+    from ..glyph_history import append_metric
+
     G.graph["_REMESH_META"] = meta
     if G.graph.get("REMESH_LOG_EVENTS", REMESH_DEFAULTS["REMESH_LOG_EVENTS"]):
         hist = G.graph.setdefault("history", {})
         append_metric(hist, "remesh_events", dict(meta))
-    callback_manager.invoke_callbacks(
-        G, CallbackEvent.ON_REMESH.value, dict(meta)
-    )
+    callback_manager.invoke_callbacks(G, CallbackEvent.ON_REMESH.value, dict(meta))
 
 
 def apply_network_remesh(G: CommunityGraph) -> None:
     """Network-scale REMESH using ``_epi_hist`` with multi-scale memory."""
+    from ..glyph_history import current_step_idx, ensure_history
     nx, _ = _get_networkx_modules()
     tau_g = int(get_param(G, "REMESH_TAU_GLOBAL"))
     tau_l = int(get_param(G, "REMESH_TAU_LOCAL"))
@@ -167,8 +149,12 @@ def apply_network_remesh(G: CommunityGraph) -> None:
 
     for n, nd in G.nodes(data=True):
         epi_now = _as_float(get_attr(nd, ALIAS_EPI, 0.0))
-        epi_old_l = _as_float(past_l.get(n) if isinstance(past_l, Mapping) else None, epi_now)
-        epi_old_g = _as_float(past_g.get(n) if isinstance(past_g, Mapping) else None, epi_now)
+        epi_old_l = _as_float(
+            past_l.get(n) if isinstance(past_l, Mapping) else None, epi_now
+        )
+        epi_old_g = _as_float(
+            past_g.get(n) if isinstance(past_g, Mapping) else None, epi_now
+        )
         mixed = (1 - alpha) * epi_now + alpha * epi_old_l
         mixed = (1 - alpha) * mixed + alpha * epi_old_g
         set_attr(nd, ALIAS_EPI, mixed)
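The per-node update above applies the same ``alpha`` twice: first blending the current EPI with the local-memory snapshot, then blending that result with the global-memory snapshot, which expands to ``(1 - alpha)**2 * epi_now + alpha * (1 - alpha) * epi_old_l + alpha * epi_old_g``. A quick numeric check of that expansion (values are arbitrary):

alpha = 0.25
epi_now, epi_old_l, epi_old_g = 0.8, 0.6, 0.4

# Two-stage blend as written in apply_network_remesh
mixed = (1 - alpha) * epi_now + alpha * epi_old_l
mixed = (1 - alpha) * mixed + alpha * epi_old_g

# Equivalent closed form
expanded = (
    (1 - alpha) ** 2 * epi_now
    + alpha * (1 - alpha) * epi_old_l
    + alpha * epi_old_g
)

assert abs(mixed - expanded) < 1e-12  # both give 0.6625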
@@ -212,10 +198,7 @@ def _mst_edges_from_epi(
     H.add_weighted_edges_from(
         (u, v, abs(epi[u] - epi[v])) for u, v in combinations(nodes, 2)
     )
-    return {
-        _ordered_edge(u, v)
-        for u, v in nx.minimum_spanning_edges(H, data=False)
-    }
+    return {_ordered_edge(u, v) for u, v in nx.minimum_spanning_edges(H, data=False)}
 
 
 def _knn_edges(
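``_mst_edges_from_epi`` (partially visible above) builds a complete graph whose edge weights are the absolute EPI differences and keeps only the minimum-spanning-tree edges. A roughly equivalent standalone version using ``networkx`` (the function name and repr-based edge ordering mirror the diff, but this is written here for illustration, not lifted from tnfr):

from itertools import combinations

import networkx as nx


def mst_edges_from_epi(nodes, epi):
    """Return MST edges of the complete graph weighted by ``abs(epi[u] - epi[v])``."""
    H = nx.Graph()
    H.add_nodes_from(nodes)
    H.add_weighted_edges_from(
        (u, v, abs(epi[u] - epi[v])) for u, v in combinations(nodes, 2)
    )
    # Order each edge deterministically, as _ordered_edge does via repr().
    return {
        (u, v) if repr(u) <= repr(v) else (v, u)
        for u, v in nx.minimum_spanning_edges(H, data=False)
    }


epi = {"a": 0.1, "b": 0.5, "c": 0.55}
print(mst_edges_from_epi(list(epi), epi))  # edges ('a', 'b') and ('b', 'c')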
@@ -332,6 +315,7 @@ def _community_remesh(
     n_before: int,
 ) -> None:
     """Remesh ``G`` replacing nodes by modular communities."""
+    from ..glyph_history import append_metric
     comms = list(nx_comm.greedy_modularity_communities(G))
     if len(comms) <= 1:
         with edge_version_update(G):
@@ -340,9 +324,7 @@
         return
     C = _community_graph(comms, epi, nx)
     mst_c = nx.minimum_spanning_tree(C, weight="weight")
-    new_edges: set[RemeshEdge] = {
-        _ordered_edge(u, v) for u, v in mst_c.edges()
-    }
+    new_edges: set[RemeshEdge] = {_ordered_edge(u, v) for u, v in mst_c.edges()}
     extra_edges, attempts, rewired_edges = _community_k_neighbor_edges(
         C, k_val, p_rewire, rnd
     )
@@ -398,6 +380,7 @@ def apply_topological_remesh(
     When ``seed`` is ``None`` the RNG draws its base seed from
     ``G.graph['RANDOM_SEED']`` to keep runs reproducible.
     """
+    from ..glyph_history import append_metric
     nodes = list(G.nodes())
     n_before = len(nodes)
     if n_before <= 1:
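The docstring above ties reproducibility to ``G.graph['RANDOM_SEED']``: when no explicit ``seed`` is passed, the remesh RNG is derived from that graph-level value, so repeated runs rewire identically. ``make_rng``'s actual signature is not shown in this diff, so the following is an assumed stand-in built on the standard library:

import hashlib
import random


def rng_for_remesh(graph_meta: dict, seed: int | None = None) -> random.Random:
    """Derive a deterministic RNG from an explicit seed or the graph-level default."""
    base = seed if seed is not None else graph_meta.get("RANDOM_SEED", 0)
    digest = hashlib.sha256(f"topological_remesh:{base}".encode()).digest()
    return random.Random(int.from_bytes(digest[:8], "big"))


meta = {"RANDOM_SEED": 42}
# Same graph seed, same draw sequence on every run.
assert rng_for_remesh(meta).random() == rng_for_remesh(meta).random()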
@@ -410,18 +393,14 @@
 
     if mode is None:
         mode = str(
-            G.graph.get(
-                "REMESH_MODE", REMESH_DEFAULTS.get("REMESH_MODE", "knn")
-            )
+            G.graph.get("REMESH_MODE", REMESH_DEFAULTS.get("REMESH_MODE", "knn"))
         )
     mode = str(mode)
     nx, nx_comm = _get_networkx_modules()
     epi = {n: _as_float(get_attr(G.nodes[n], ALIAS_EPI, 0.0)) for n in nodes}
     mst_edges = _mst_edges_from_epi(nx, nodes, epi)
     default_k = int(
-        G.graph.get(
-            "REMESH_COMMUNITY_K", REMESH_DEFAULTS.get("REMESH_COMMUNITY_K", 2)
-        )
+        G.graph.get("REMESH_COMMUNITY_K", REMESH_DEFAULTS.get("REMESH_COMMUNITY_K", 2))
     )
     k_val = max(1, int(k) if k is not None else default_k)
 
@@ -477,6 +456,9 @@ def apply_remesh_if_globally_stable(
     stable_step_window: int | None = None,
     **kwargs: Any,
 ) -> None:
+    """Trigger remeshing when global stability indicators satisfy thresholds."""
+
+    from ..glyph_history import ensure_history
     if kwargs:
         unexpected = ", ".join(sorted(kwargs))
         raise TypeError(
@@ -540,9 +522,7 @@
     win_sf = sf[-w_estab:]
     if not all(v >= frac_req for v in win_sf):
         return
-    if cfg["REMESH_REQUIRE_STABILITY"] and not _extra_gating_ok(
-        hist, cfg, w_estab
-    ):
+    if cfg["REMESH_REQUIRE_STABILITY"] and not _extra_gating_ok(hist, cfg, w_estab):
         return
 
     last = G.graph.get("_last_remesh_step", -(10**9))
@@ -551,10 +531,7 @@
         return
     t_now = _as_float(G.graph.get("_t", 0.0))
     last_ts = _as_float(G.graph.get("_last_remesh_ts", -1e12))
-    if (
-        cfg["REMESH_COOLDOWN_TS"] > 0
-        and (t_now - last_ts) < cfg["REMESH_COOLDOWN_TS"]
-    ):
+    if cfg["REMESH_COOLDOWN_TS"] > 0 and (t_now - last_ts) < cfg["REMESH_COOLDOWN_TS"]:
         return
 
     apply_network_remesh(G)
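Taken together, the gating visible in ``apply_remesh_if_globally_stable`` requires a full window of stability above the threshold and then honours both a step-based and a timestamp-based cooldown before calling ``apply_network_remesh``. A condensed sketch of that decision logic (field names mirror the diff; the function shape and the step-cooldown comparison are illustrative assumptions):

def should_remesh(
    stable_frac_history: list[float],
    frac_req: float,
    window: int,
    step: int,
    last_step: int,
    cooldown_steps: int,
    t_now: float,
    last_ts: float,
    cooldown_ts: float,
) -> bool:
    """Return True when the stability window and both cooldowns allow a remesh."""
    if len(stable_frac_history) < window:
        return False
    if not all(v >= frac_req for v in stable_frac_history[-window:]):
        return False
    if cooldown_steps > 0 and (step - last_step) < cooldown_steps:
        return False
    if cooldown_ts > 0 and (t_now - last_ts) < cooldown_ts:
        return False
    return True


assert should_remesh(
    [0.92, 0.95, 0.97], frac_req=0.9, window=3,
    step=120, last_step=50, cooldown_steps=25,
    t_now=12.0, last_ts=3.0, cooldown_ts=5.0,
)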