tnfr 4.5.2__py3-none-any.whl → 6.0.0__py3-none-any.whl

Files changed (161)
  1. tnfr/__init__.py +228 -49
  2. tnfr/__init__.pyi +40 -0
  3. tnfr/_compat.py +11 -0
  4. tnfr/_version.py +7 -0
  5. tnfr/_version.pyi +7 -0
  6. tnfr/alias.py +106 -21
  7. tnfr/alias.pyi +140 -0
  8. tnfr/cache.py +666 -512
  9. tnfr/cache.pyi +232 -0
  10. tnfr/callback_utils.py +2 -9
  11. tnfr/callback_utils.pyi +105 -0
  12. tnfr/cli/__init__.py +21 -7
  13. tnfr/cli/__init__.pyi +47 -0
  14. tnfr/cli/arguments.py +42 -20
  15. tnfr/cli/arguments.pyi +33 -0
  16. tnfr/cli/execution.py +54 -20
  17. tnfr/cli/execution.pyi +80 -0
  18. tnfr/cli/utils.py +0 -2
  19. tnfr/cli/utils.pyi +8 -0
  20. tnfr/config/__init__.py +12 -0
  21. tnfr/config/__init__.pyi +8 -0
  22. tnfr/config/constants.py +104 -0
  23. tnfr/config/constants.pyi +12 -0
  24. tnfr/{config.py → config/init.py} +11 -7
  25. tnfr/config/init.pyi +8 -0
  26. tnfr/config/operator_names.py +106 -0
  27. tnfr/config/operator_names.pyi +28 -0
  28. tnfr/config/presets.py +104 -0
  29. tnfr/config/presets.pyi +7 -0
  30. tnfr/constants/__init__.py +78 -24
  31. tnfr/constants/__init__.pyi +104 -0
  32. tnfr/constants/core.py +1 -2
  33. tnfr/constants/core.pyi +17 -0
  34. tnfr/constants/init.pyi +12 -0
  35. tnfr/constants/metric.py +4 -12
  36. tnfr/constants/metric.pyi +19 -0
  37. tnfr/constants_glyphs.py +9 -91
  38. tnfr/constants_glyphs.pyi +12 -0
  39. tnfr/dynamics/__init__.py +112 -634
  40. tnfr/dynamics/__init__.pyi +83 -0
  41. tnfr/dynamics/adaptation.py +201 -0
  42. tnfr/dynamics/aliases.py +22 -0
  43. tnfr/dynamics/coordination.py +343 -0
  44. tnfr/dynamics/dnfr.py +1936 -354
  45. tnfr/dynamics/dnfr.pyi +33 -0
  46. tnfr/dynamics/integrators.py +369 -75
  47. tnfr/dynamics/integrators.pyi +35 -0
  48. tnfr/dynamics/runtime.py +521 -0
  49. tnfr/dynamics/sampling.py +8 -5
  50. tnfr/dynamics/sampling.pyi +7 -0
  51. tnfr/dynamics/selectors.py +680 -0
  52. tnfr/execution.py +56 -41
  53. tnfr/execution.pyi +65 -0
  54. tnfr/flatten.py +7 -7
  55. tnfr/flatten.pyi +28 -0
  56. tnfr/gamma.py +54 -37
  57. tnfr/gamma.pyi +40 -0
  58. tnfr/glyph_history.py +85 -38
  59. tnfr/glyph_history.pyi +53 -0
  60. tnfr/grammar.py +19 -338
  61. tnfr/grammar.pyi +13 -0
  62. tnfr/helpers/__init__.py +110 -30
  63. tnfr/helpers/__init__.pyi +66 -0
  64. tnfr/helpers/numeric.py +1 -0
  65. tnfr/helpers/numeric.pyi +12 -0
  66. tnfr/immutable.py +55 -19
  67. tnfr/immutable.pyi +37 -0
  68. tnfr/initialization.py +12 -10
  69. tnfr/initialization.pyi +73 -0
  70. tnfr/io.py +99 -34
  71. tnfr/io.pyi +11 -0
  72. tnfr/locking.pyi +7 -0
  73. tnfr/metrics/__init__.pyi +20 -0
  74. tnfr/metrics/coherence.py +934 -294
  75. tnfr/metrics/common.py +1 -3
  76. tnfr/metrics/common.pyi +15 -0
  77. tnfr/metrics/core.py +192 -34
  78. tnfr/metrics/core.pyi +13 -0
  79. tnfr/metrics/diagnosis.py +707 -101
  80. tnfr/metrics/diagnosis.pyi +89 -0
  81. tnfr/metrics/export.py +27 -13
  82. tnfr/metrics/glyph_timing.py +218 -38
  83. tnfr/metrics/reporting.py +22 -18
  84. tnfr/metrics/reporting.pyi +12 -0
  85. tnfr/metrics/sense_index.py +199 -25
  86. tnfr/metrics/sense_index.pyi +9 -0
  87. tnfr/metrics/trig.py +53 -18
  88. tnfr/metrics/trig.pyi +12 -0
  89. tnfr/metrics/trig_cache.py +3 -7
  90. tnfr/metrics/trig_cache.pyi +10 -0
  91. tnfr/node.py +148 -125
  92. tnfr/node.pyi +161 -0
  93. tnfr/observers.py +44 -30
  94. tnfr/observers.pyi +46 -0
  95. tnfr/ontosim.py +14 -13
  96. tnfr/ontosim.pyi +33 -0
  97. tnfr/operators/__init__.py +84 -52
  98. tnfr/operators/__init__.pyi +31 -0
  99. tnfr/operators/definitions.py +181 -0
  100. tnfr/operators/definitions.pyi +92 -0
  101. tnfr/operators/jitter.py +86 -23
  102. tnfr/operators/jitter.pyi +11 -0
  103. tnfr/operators/registry.py +80 -0
  104. tnfr/operators/registry.pyi +15 -0
  105. tnfr/operators/remesh.py +141 -57
  106. tnfr/presets.py +9 -54
  107. tnfr/presets.pyi +7 -0
  108. tnfr/py.typed +0 -0
  109. tnfr/rng.py +259 -73
  110. tnfr/rng.pyi +14 -0
  111. tnfr/selector.py +24 -17
  112. tnfr/selector.pyi +19 -0
  113. tnfr/sense.py +55 -43
  114. tnfr/sense.pyi +30 -0
  115. tnfr/structural.py +44 -267
  116. tnfr/structural.pyi +46 -0
  117. tnfr/telemetry/__init__.py +13 -0
  118. tnfr/telemetry/verbosity.py +37 -0
  119. tnfr/tokens.py +3 -2
  120. tnfr/tokens.pyi +41 -0
  121. tnfr/trace.py +272 -82
  122. tnfr/trace.pyi +68 -0
  123. tnfr/types.py +345 -6
  124. tnfr/types.pyi +145 -0
  125. tnfr/utils/__init__.py +158 -0
  126. tnfr/utils/__init__.pyi +133 -0
  127. tnfr/utils/cache.py +755 -0
  128. tnfr/utils/cache.pyi +156 -0
  129. tnfr/{collections_utils.py → utils/data.py} +57 -90
  130. tnfr/utils/data.pyi +73 -0
  131. tnfr/utils/graph.py +87 -0
  132. tnfr/utils/graph.pyi +10 -0
  133. tnfr/utils/init.py +746 -0
  134. tnfr/utils/init.pyi +85 -0
  135. tnfr/{json_utils.py → utils/io.py} +13 -18
  136. tnfr/utils/io.pyi +10 -0
  137. tnfr/utils/validators.py +130 -0
  138. tnfr/utils/validators.pyi +19 -0
  139. tnfr/validation/__init__.py +25 -0
  140. tnfr/validation/__init__.pyi +17 -0
  141. tnfr/validation/compatibility.py +59 -0
  142. tnfr/validation/compatibility.pyi +8 -0
  143. tnfr/validation/grammar.py +149 -0
  144. tnfr/validation/grammar.pyi +11 -0
  145. tnfr/validation/rules.py +194 -0
  146. tnfr/validation/rules.pyi +18 -0
  147. tnfr/validation/syntax.py +151 -0
  148. tnfr/validation/syntax.pyi +7 -0
  149. tnfr-6.0.0.dist-info/METADATA +135 -0
  150. tnfr-6.0.0.dist-info/RECORD +157 -0
  151. tnfr/graph_utils.py +0 -84
  152. tnfr/import_utils.py +0 -228
  153. tnfr/logging_utils.py +0 -116
  154. tnfr/validators.py +0 -84
  155. tnfr/value_utils.py +0 -59
  156. tnfr-4.5.2.dist-info/METADATA +0 -379
  157. tnfr-4.5.2.dist-info/RECORD +0 -67
  158. {tnfr-4.5.2.dist-info → tnfr-6.0.0.dist-info}/WHEEL +0 -0
  159. {tnfr-4.5.2.dist-info → tnfr-6.0.0.dist-info}/entry_points.txt +0 -0
  160. {tnfr-4.5.2.dist-info → tnfr-6.0.0.dist-info}/licenses/LICENSE.md +0 -0
  161. {tnfr-4.5.2.dist-info → tnfr-6.0.0.dist-info}/top_level.txt +0 -0
tnfr/dynamics/dnfr.pyi ADDED
@@ -0,0 +1,33 @@
+ from typing import Any
+
+ from tnfr.types import DeltaNFRHook, TNFRGraph
+
+ __all__: tuple[str, ...]
+
+ def default_compute_delta_nfr(
+     G: TNFRGraph,
+     *,
+     cache_size: int | None = ...,
+     n_jobs: int | None = ...,
+ ) -> None: ...
+
+
+ def dnfr_epi_vf_mixed(G: TNFRGraph, *, n_jobs: int | None = ...) -> None: ...
+
+
+ def dnfr_laplacian(G: TNFRGraph, *, n_jobs: int | None = ...) -> None: ...
+
+
+ def dnfr_phase_only(G: TNFRGraph, *, n_jobs: int | None = ...) -> None: ...
+
+
+ def set_delta_nfr_hook(
+     G: TNFRGraph,
+     func: DeltaNFRHook,
+     *,
+     name: str | None = ...,
+     note: str | None = ...,
+ ) -> None: ...
+
+
+ def __getattr__(name: str) -> Any: ...
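
Based on the stub above, a custom ΔNFR computation can be attached to a graph through `set_delta_nfr_hook`. A minimal sketch, assuming the registered hook receives the graph as its first argument and writes one ΔNFR value per node; the `"dnfr"` attribute key below is illustrative, not the package's alias:

# Hypothetical hook registration (sketch, not taken from the package).
import networkx as nx
from tnfr.dynamics.dnfr import set_delta_nfr_hook

def zero_delta_nfr(G, *args, **kwargs) -> None:
    # Assumed hook shape: write a null ΔNFR for every node.
    for _, nd in G.nodes(data=True):
        nd["dnfr"] = 0.0  # attribute key is illustrative

G = nx.Graph()
G.add_edge("a", "b")
set_delta_nfr_hook(G, zero_delta_nfr, name="zero", note="example-only hook")
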
tnfr/dynamics/integrators.py CHANGED
@@ -1,17 +1,23 @@
  from __future__ import annotations

  import math
- from collections.abc import Mapping
- from typing import Any, Literal
+ from abc import ABC, abstractmethod
+ from collections.abc import Iterable, Mapping
+ from concurrent.futures import ProcessPoolExecutor
+ from multiprocessing import get_context
+ from typing import Any, Literal, cast

- import networkx as nx  # type: ignore[import-untyped]
+ import networkx as nx

  from ..constants import (
      DEFAULTS,
      get_aliases,
  )
  from ..gamma import _get_gamma_spec, eval_gamma
- from ..alias import get_attr, get_attr_str, set_attr, set_attr_str
+ from ..alias import collect_attr, get_attr, get_attr_str, set_attr, set_attr_str
+ from ..utils import get_numpy
+ from ..types import NodeId, TNFRGraph
+ from .._compat import TypeAlias

  ALIAS_VF = get_aliases("VF")
  ALIAS_DNFR = get_aliases("DNFR")
@@ -21,17 +27,130 @@ ALIAS_EPI_KIND = get_aliases("EPI_KIND")
  ALIAS_D2EPI = get_aliases("D2EPI")

  __all__ = (
+     "AbstractIntegrator",
+     "DefaultIntegrator",
      "prepare_integration_params",
      "update_epi_via_nodal_equation",
  )


+ GammaMap: TypeAlias = dict[NodeId, float]
+ """Γ evaluation cache keyed by node identifier."""
+
+ NodeIncrements: TypeAlias = dict[NodeId, tuple[float, ...]]
+ """Mapping of nodes to staged integration increments."""
+
+ NodalUpdate: TypeAlias = dict[NodeId, tuple[float, float, float]]
+ """Mapping of nodes to ``(EPI, dEPI/dt, ∂²EPI/∂t²)`` tuples."""
+
+ IntegratorMethod: TypeAlias = Literal["euler", "rk4"]
+ """Supported explicit integration schemes for nodal updates."""
+
+
+ _PARALLEL_GRAPH: TNFRGraph | None = None
+
+
+ def _gamma_worker_init(graph: TNFRGraph) -> None:
+     """Initialise process-local graph reference for Γ evaluation."""
+
+     global _PARALLEL_GRAPH
+     _PARALLEL_GRAPH = graph
+
+
+ def _gamma_worker(task: tuple[list[NodeId], float]) -> list[tuple[NodeId, float]]:
+     """Evaluate Γ for ``task`` chunk using process-local graph."""
+
+     chunk, t = task
+     if _PARALLEL_GRAPH is None:
+         raise RuntimeError("Parallel Γ worker initialised without graph reference")
+     return [
+         (node, float(eval_gamma(_PARALLEL_GRAPH, node, t))) for node in chunk
+     ]
+
+
+ def _normalise_jobs(n_jobs: int | None, total: int) -> int | None:
+     """Return an effective worker count respecting serial fallbacks."""
+
+     if n_jobs is None:
+         return None
+     try:
+         workers = int(n_jobs)
+     except (TypeError, ValueError):
+         return None
+     if workers <= 1 or total <= 1:
+         return None
+     return max(1, min(workers, total))
+
+
+ def _chunk_nodes(nodes: list[NodeId], chunk_size: int) -> Iterable[list[NodeId]]:
+     """Yield deterministic chunks from ``nodes`` respecting insertion order."""
+
+     for idx in range(0, len(nodes), chunk_size):
+         yield nodes[idx:idx + chunk_size]
+
+
+ def _apply_increment_chunk(
+     chunk: list[tuple[NodeId, float, float, tuple[float, ...]]],
+     dt_step: float,
+     method: str,
+ ) -> list[tuple[NodeId, tuple[float, float, float]]]:
+     """Compute updated states for ``chunk`` using scalar arithmetic."""
+
+     results: list[tuple[NodeId, tuple[float, float, float]]] = []
+     dt_nonzero = dt_step != 0
+
+     for node, epi_i, dEPI_prev, ks in chunk:
+         if method == "rk4":
+             k1, k2, k3, k4 = ks
+             epi = epi_i + (dt_step / 6.0) * (k1 + 2 * k2 + 2 * k3 + k4)
+             dEPI_dt = k4
+         else:
+             (k1,) = ks
+             epi = epi_i + dt_step * k1
+             dEPI_dt = k1
+         d2epi = (dEPI_dt - dEPI_prev) / dt_step if dt_nonzero else 0.0
+         results.append((node, (float(epi), float(dEPI_dt), float(d2epi))))
+
+     return results
+
+
+ def _evaluate_gamma_map(
+     G: TNFRGraph,
+     nodes: list[NodeId],
+     t: float,
+     *,
+     n_jobs: int | None = None,
+ ) -> GammaMap:
+     """Return Γ evaluations for ``nodes`` at time ``t`` respecting parallelism."""
+
+     workers = _normalise_jobs(n_jobs, len(nodes))
+     if workers is None:
+         return {n: float(eval_gamma(G, n, t)) for n in nodes}
+
+     chunk_size = max(1, math.ceil(len(nodes) / (workers * 4)))
+     mp_ctx = get_context("spawn")
+     tasks = ((chunk, t) for chunk in _chunk_nodes(nodes, chunk_size))
+
+     results: GammaMap = {}
+     with ProcessPoolExecutor(
+         max_workers=workers,
+         mp_context=mp_ctx,
+         initializer=_gamma_worker_init,
+         initargs=(G,),
+     ) as executor:
+         futures = [executor.submit(_gamma_worker, task) for task in tasks]
+         for fut in futures:
+             for node, value in fut.result():
+                 results[node] = value
+     return results
+
+
  def prepare_integration_params(
-     G,
+     G: TNFRGraph,
      dt: float | None = None,
      t: float | None = None,
      method: Literal["euler", "rk4"] | None = None,
- ):
+ ) -> tuple[float, int, float, Literal["euler", "rk4"]]:
      """Validate and normalise ``dt``, ``t`` and ``method`` for integration.

      Returns ``(dt_step, steps, t0, method)`` where ``dt_step`` is the
@@ -52,13 +171,13 @@ def prepare_integration_params(
      else:
          t = float(t)

-     method = (
+     method_value = (
          method
          or G.graph.get(
              "INTEGRATOR_METHOD", DEFAULTS.get("INTEGRATOR_METHOD", "euler")
          )
      ).lower()
-     if method not in ("euler", "rk4"):
+     if method_value not in ("euler", "rk4"):
          raise ValueError("method must be 'euler' or 'rk4'")

      dt_min = float(G.graph.get("DT_MIN", DEFAULTS.get("DT_MIN", 0.0)))
@@ -69,43 +188,108 @@ def prepare_integration_params(
      # ``steps`` is guaranteed to be ≥1 at this point
      dt_step = dt / steps

-     return dt_step, steps, t, method
+     return dt_step, steps, t, cast(Literal["euler", "rk4"], method_value)


  def _apply_increments(
-     G: Any,
+     G: TNFRGraph,
      dt_step: float,
-     increments: dict[Any, tuple[float, ...]],
+     increments: NodeIncrements,
      *,
      method: str,
- ) -> dict[Any, tuple[float, float, float]]:
+     n_jobs: int | None = None,
+ ) -> NodalUpdate:
      """Combine precomputed increments to update node states."""

-     new_states: dict[Any, tuple[float, float, float]] = {}
-     for n, nd in G.nodes(data=True):
-         vf, dnfr, dEPI_dt_prev, epi_i = _node_state(nd)
-         ks = increments[n]
+     nodes: list[NodeId] = list(G.nodes)
+     if not nodes:
+         return {}
+
+     np = get_numpy()
+
+     epi_initial: list[float] = []
+     dEPI_prev: list[float] = []
+     ordered_increments: list[tuple[float, ...]] = []
+
+     for node in nodes:
+         nd = G.nodes[node]
+         _, _, dEPI_dt_prev, epi_i = _node_state(nd)
+         epi_initial.append(float(epi_i))
+         dEPI_prev.append(float(dEPI_dt_prev))
+         ordered_increments.append(increments[node])
+
+     if np is not None:
+         epi_arr = np.asarray(epi_initial, dtype=float)
+         dEPI_prev_arr = np.asarray(dEPI_prev, dtype=float)
+         k_arr = np.asarray(ordered_increments, dtype=float)
+
          if method == "rk4":
-             k1, k2, k3, k4 = ks
-             # RK4: EPIₙ₊₁ = EPIᵢ + Δt/6·(k1 + 2k2 + 2k3 + k4)
-             epi = epi_i + (dt_step / 6.0) * (k1 + 2 * k2 + 2 * k3 + k4)
+             if k_arr.ndim != 2 or k_arr.shape[1] != 4:
+                 raise ValueError("rk4 increments require four staged values")
+             dt_factor = dt_step / 6.0
+             k1 = k_arr[:, 0]
+             k2 = k_arr[:, 1]
+             k3 = k_arr[:, 2]
+             k4 = k_arr[:, 3]
+             epi = epi_arr + dt_factor * (k1 + 2 * k2 + 2 * k3 + k4)
              dEPI_dt = k4
          else:
-             (k1,) = ks
-             # Euler: EPIₙ₊₁ = EPIᵢ + Δt·k1 where k1 = νf·ΔNFR + Γ
-             epi = epi_i + dt_step * k1
+             if k_arr.ndim == 1:
+                 k1 = k_arr
+             else:
+                 k1 = k_arr[:, 0]
+             epi = epi_arr + dt_step * k1
              dEPI_dt = k1
-         d2epi = (dEPI_dt - dEPI_dt_prev) / dt_step if dt_step != 0 else 0.0
-         new_states[n] = (epi, dEPI_dt, d2epi)
-     return new_states
+
+         if dt_step != 0:
+             d2epi = (dEPI_dt - dEPI_prev_arr) / dt_step
+         else:
+             d2epi = np.zeros_like(dEPI_dt)
+
+         results: NodalUpdate = {}
+         for idx, node in enumerate(nodes):
+             results[node] = (
+                 float(epi[idx]),
+                 float(dEPI_dt[idx]),
+                 float(d2epi[idx]),
+             )
+         return results
+
+     payload: list[tuple[NodeId, float, float, tuple[float, ...]]] = list(
+         zip(nodes, epi_initial, dEPI_prev, ordered_increments)
+     )
+
+     workers = _normalise_jobs(n_jobs, len(nodes))
+     if workers is None:
+         return dict(_apply_increment_chunk(payload, dt_step, method))
+
+     chunk_size = max(1, math.ceil(len(nodes) / (workers * 4)))
+     mp_ctx = get_context("spawn")
+
+     results: NodalUpdate = {}
+     with ProcessPoolExecutor(max_workers=workers, mp_context=mp_ctx) as executor:
+         futures = [
+             executor.submit(
+                 _apply_increment_chunk,
+                 chunk,
+                 dt_step,
+                 method,
+             )
+             for chunk in _chunk_nodes(payload, chunk_size)
+         ]
+         for fut in futures:
+             for node, value in fut.result():
+                 results[node] = value
+
+     return {node: results[node] for node in nodes}


  def _collect_nodal_increments(
-     G: Any,
-     gamma_maps: tuple[dict[Any, float], ...],
+     G: TNFRGraph,
+     gamma_maps: tuple[GammaMap, ...],
      *,
      method: str,
- ) -> dict[Any, tuple[float, ...]]:
+ ) -> NodeIncrements:
      """Combine node base state with staged Γ contributions.

      ``gamma_maps`` must contain one entry for Euler integration and four for
@@ -113,38 +297,71 @@ def _collect_nodal_increments(
      with the supplied Γ evaluations.
      """

-     increments: dict[Any, tuple[float, ...]] = {}
-     for n, nd in G.nodes(data=True):
+     nodes: list[NodeId] = list(G.nodes())
+     if not nodes:
+         return {}
+
+     if method == "rk4":
+         expected_maps = 4
+     elif method == "euler":
+         expected_maps = 1
+     else:
+         raise ValueError("method must be 'euler' or 'rk4'")
+
+     if len(gamma_maps) != expected_maps:
+         raise ValueError(f"{method} integration requires {expected_maps} gamma maps")
+
+     np = get_numpy()
+     if np is not None:
+         vf = collect_attr(G, nodes, ALIAS_VF, 0.0, np=np)
+         dnfr = collect_attr(G, nodes, ALIAS_DNFR, 0.0, np=np)
+         base = vf * dnfr
+
+         gamma_arrays = [
+             np.fromiter((gm.get(n, 0.0) for n in nodes), float, count=len(nodes))
+             for gm in gamma_maps
+         ]
+         if gamma_arrays:
+             gamma_stack = np.stack(gamma_arrays, axis=1)
+             combined = base[:, None] + gamma_stack
+         else:
+             combined = base[:, None]
+
+         return {
+             node: tuple(float(value) for value in combined[idx])
+             for idx, node in enumerate(nodes)
+         }
+
+     increments: NodeIncrements = {}
+     for node in nodes:
+         nd = G.nodes[node]
          vf, dnfr, *_ = _node_state(nd)
          base = vf * dnfr
-         gammas = [gm.get(n, 0.0) for gm in gamma_maps]
+         gammas = [gm.get(node, 0.0) for gm in gamma_maps]

          if method == "rk4":
-             if len(gammas) != 4:
-                 raise ValueError("rk4 integration requires four gamma maps")
              k1, k2, k3, k4 = gammas
-             increments[n] = (
+             increments[node] = (
                  base + k1,
                  base + k2,
                  base + k3,
                  base + k4,
              )
          else:
-             if len(gammas) != 1:
-                 raise ValueError("euler integration requires one gamma map")
              (k1,) = gammas
-             increments[n] = (base + k1,)
+             increments[node] = (base + k1,)

      return increments


  def _build_gamma_increments(
-     G: Any,
+     G: TNFRGraph,
      dt_step: float,
      t_local: float,
      *,
      method: str,
- ) -> dict[Any, tuple[float, ...]]:
+     n_jobs: int | None = None,
+ ) -> NodeIncrements:
      """Evaluate Γ contributions and merge them with ``νf·ΔNFR`` base terms."""

      if method == "rk4":
@@ -163,50 +380,146 @@ def _build_gamma_increments(
      gamma_type = str(gamma_spec.get("type", "")).lower()

      if gamma_type == "none":
-         gamma_maps = tuple({} for _ in range(gamma_count))
+         gamma_maps: tuple[GammaMap, ...] = tuple(
+             cast(GammaMap, {}) for _ in range(gamma_count)
+         )
+         return _collect_nodal_increments(G, gamma_maps, method=method)
+
+     nodes: list[NodeId] = list(G.nodes)
+     if not nodes:
+         gamma_maps = tuple(cast(GammaMap, {}) for _ in range(gamma_count))
          return _collect_nodal_increments(G, gamma_maps, method=method)

      if method == "rk4":
          t_mid = t_local + dt_step / 2.0
          t_end = t_local + dt_step
-         g1_map = {n: eval_gamma(G, n, t_local) for n in G.nodes}
-         g_mid_map = {n: eval_gamma(G, n, t_mid) for n in G.nodes}
-         g4_map = {n: eval_gamma(G, n, t_end) for n in G.nodes}
+         g1_map = _evaluate_gamma_map(G, nodes, t_local, n_jobs=n_jobs)
+         g_mid_map = _evaluate_gamma_map(G, nodes, t_mid, n_jobs=n_jobs)
+         g4_map = _evaluate_gamma_map(G, nodes, t_end, n_jobs=n_jobs)
          gamma_maps = (g1_map, g_mid_map, g_mid_map, g4_map)
      else:  # method == "euler"
-         gamma_maps = ({n: eval_gamma(G, n, t_local) for n in G.nodes},)
+         gamma_maps = (
+             _evaluate_gamma_map(G, nodes, t_local, n_jobs=n_jobs),
+         )

      return _collect_nodal_increments(G, gamma_maps, method=method)


- def _integrate_euler(G, dt_step: float, t_local: float):
+ def _integrate_euler(
+     G: TNFRGraph,
+     dt_step: float,
+     t_local: float,
+     *,
+     n_jobs: int | None = None,
+ ) -> NodalUpdate:
      """One explicit Euler integration step."""
      increments = _build_gamma_increments(
          G,
          dt_step,
          t_local,
          method="euler",
+         n_jobs=n_jobs,
+     )
+     return _apply_increments(
+         G,
+         dt_step,
+         increments,
+         method="euler",
+         n_jobs=n_jobs,
      )
-     return _apply_increments(G, dt_step, increments, method="euler")


- def _integrate_rk4(G, dt_step: float, t_local: float):
+ def _integrate_rk4(
+     G: TNFRGraph,
+     dt_step: float,
+     t_local: float,
+     *,
+     n_jobs: int | None = None,
+ ) -> NodalUpdate:
      """One Runge–Kutta order-4 integration step."""
      increments = _build_gamma_increments(
          G,
          dt_step,
          t_local,
          method="rk4",
+         n_jobs=n_jobs,
      )
-     return _apply_increments(G, dt_step, increments, method="rk4")
+     return _apply_increments(
+         G,
+         dt_step,
+         increments,
+         method="rk4",
+         n_jobs=n_jobs,
+     )
+
+
+ class AbstractIntegrator(ABC):
+     """Abstract base class encapsulating nodal equation integration."""
+
+     @abstractmethod
+     def integrate(
+         self,
+         graph: TNFRGraph,
+         *,
+         dt: float | None,
+         t: float | None,
+         method: str | None,
+         n_jobs: int | None,
+     ) -> None:
+         """Advance ``graph`` coherence states according to the nodal equation."""
+
+
+ class DefaultIntegrator(AbstractIntegrator):
+     """Explicit integrator combining Euler and RK4 step implementations."""
+
+     def integrate(
+         self,
+         graph: TNFRGraph,
+         *,
+         dt: float | None,
+         t: float | None,
+         method: str | None,
+         n_jobs: int | None,
+     ) -> None:
+         if not isinstance(
+             graph, (nx.Graph, nx.DiGraph, nx.MultiGraph, nx.MultiDiGraph)
+         ):
+             raise TypeError("G must be a networkx graph instance")
+
+         dt_step, steps, t0, resolved_method = prepare_integration_params(
+             graph, dt, t, cast(IntegratorMethod | None, method)
+         )
+
+         t_local = t0
+         for _ in range(steps):
+             if resolved_method == "rk4":
+                 updates: NodalUpdate = _integrate_rk4(
+                     graph, dt_step, t_local, n_jobs=n_jobs
+                 )
+             else:
+                 updates = _integrate_euler(graph, dt_step, t_local, n_jobs=n_jobs)
+
+             for n, (epi, dEPI_dt, d2epi) in updates.items():
+                 nd = graph.nodes[n]
+                 epi_kind = get_attr_str(nd, ALIAS_EPI_KIND, "")
+                 set_attr(nd, ALIAS_EPI, epi)
+                 if epi_kind:
+                     set_attr_str(nd, ALIAS_EPI_KIND, epi_kind)
+                 set_attr(nd, ALIAS_DEPI, dEPI_dt)
+                 set_attr(nd, ALIAS_D2EPI, d2epi)
+
+             t_local += dt_step
+
+         graph.graph["_t"] = t_local


  def update_epi_via_nodal_equation(
-     G,
+     G: TNFRGraph,
      *,
      dt: float | None = None,
      t: float | None = None,
      method: Literal["euler", "rk4"] | None = None,
+     n_jobs: int | None = None,
  ) -> None:
      """TNFR nodal equation.

@@ -224,32 +537,13 @@ def update_epi_via_nodal_equation(
      TNFR references: nodal equation (manual), νf/ΔNFR/EPI glossary, Γ operator.
      Side effects: caches dEPI and updates EPI via explicit integration.
      """
-     if not isinstance(
-         G, (nx.Graph, nx.DiGraph, nx.MultiGraph, nx.MultiDiGraph)
-     ):
-         raise TypeError("G must be a networkx graph instance")
-
-     dt_step, steps, t0, method = prepare_integration_params(G, dt, t, method)
-
-     t_local = t0
-     for _ in range(steps):
-         if method == "rk4":
-             updates = _integrate_rk4(G, dt_step, t_local)
-         else:
-             updates = _integrate_euler(G, dt_step, t_local)
-
-         for n, (epi, dEPI_dt, d2epi) in updates.items():
-             nd = G.nodes[n]
-             epi_kind = get_attr_str(nd, ALIAS_EPI_KIND, "")
-             set_attr(nd, ALIAS_EPI, epi)
-             if epi_kind:
-                 set_attr_str(nd, ALIAS_EPI_KIND, epi_kind)
-             set_attr(nd, ALIAS_DEPI, dEPI_dt)
-             set_attr(nd, ALIAS_D2EPI, d2epi)
-
-         t_local += dt_step
-
-     G.graph["_t"] = t_local
+     DefaultIntegrator().integrate(
+         G,
+         dt=dt,
+         t=t,
+         method=method,
+         n_jobs=n_jobs,
+     )


  def _node_state(nd: dict[str, Any]) -> tuple[float, float, float, float]:
tnfr/dynamics/integrators.pyi ADDED
@@ -0,0 +1,35 @@
+ from typing import Literal
+
+ from tnfr.types import TNFRGraph
+
+ __all__: tuple[str, ...]
+
+ class AbstractIntegrator:
+     def integrate(
+         self,
+         graph: TNFRGraph,
+         *,
+         dt: float | None = ...,
+         t: float | None = ...,
+         method: str | None = ...,
+         n_jobs: int | None = ...,
+     ) -> None: ...
+
+ class DefaultIntegrator(AbstractIntegrator):
+     def __init__(self) -> None: ...
+
+ def prepare_integration_params(
+     G: TNFRGraph,
+     dt: float | None = ...,
+     t: float | None = ...,
+     method: Literal["euler", "rk4"] | None = ...,
+ ) -> tuple[float, int, float, Literal["euler", "rk4"]]: ...
+
+ def update_epi_via_nodal_equation(
+     G: TNFRGraph,
+     *,
+     dt: float | None = ...,
+     t: float | None = ...,
+     method: Literal["euler", "rk4"] | None = ...,
+     n_jobs: int | None = ...,
+ ) -> None: ...
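
The re-exported entry point keeps its previous call shape and gains an optional `n_jobs` argument; per `_normalise_jobs` in the implementation diff, `None` or values ≤ 1 keep Γ evaluation serial, while larger values route it through a spawn-based process pool. A minimal usage sketch, assuming node attributes live under keys the alias tables resolve; the literal `"EPI"`, `"vf"` and `"dnfr"` keys below are assumptions:

# Hypothetical usage sketch: advance a small graph one RK4 step.
import networkx as nx
from tnfr.dynamics.integrators import DefaultIntegrator, update_epi_via_nodal_equation

G = nx.Graph()
G.add_edge(0, 1)
for _, nd in G.nodes(data=True):
    nd["EPI"] = 0.0   # structural state (key name assumed)
    nd["vf"] = 1.0    # νf (key name assumed)
    nd["dnfr"] = 0.1  # ΔNFR (key name assumed)

# Serial evaluation; n_jobs > 1 would enable the process-pool Γ evaluation.
update_epi_via_nodal_equation(G, dt=0.1, method="rk4", n_jobs=None)

# Equivalent explicit form introduced in 6.0.0:
DefaultIntegrator().integrate(G, dt=0.1, t=None, method="rk4", n_jobs=None)

print(G.graph["_t"])  # integration clock advanced by the requested dt
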