tnfr-4.5.2-py3-none-any.whl → tnfr-6.0.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- tnfr/__init__.py +228 -49
- tnfr/__init__.pyi +40 -0
- tnfr/_compat.py +11 -0
- tnfr/_version.py +7 -0
- tnfr/_version.pyi +7 -0
- tnfr/alias.py +106 -21
- tnfr/alias.pyi +140 -0
- tnfr/cache.py +666 -512
- tnfr/cache.pyi +232 -0
- tnfr/callback_utils.py +2 -9
- tnfr/callback_utils.pyi +105 -0
- tnfr/cli/__init__.py +21 -7
- tnfr/cli/__init__.pyi +47 -0
- tnfr/cli/arguments.py +42 -20
- tnfr/cli/arguments.pyi +33 -0
- tnfr/cli/execution.py +54 -20
- tnfr/cli/execution.pyi +80 -0
- tnfr/cli/utils.py +0 -2
- tnfr/cli/utils.pyi +8 -0
- tnfr/config/__init__.py +12 -0
- tnfr/config/__init__.pyi +8 -0
- tnfr/config/constants.py +104 -0
- tnfr/config/constants.pyi +12 -0
- tnfr/{config.py → config/init.py} +11 -7
- tnfr/config/init.pyi +8 -0
- tnfr/config/operator_names.py +106 -0
- tnfr/config/operator_names.pyi +28 -0
- tnfr/config/presets.py +104 -0
- tnfr/config/presets.pyi +7 -0
- tnfr/constants/__init__.py +78 -24
- tnfr/constants/__init__.pyi +104 -0
- tnfr/constants/core.py +1 -2
- tnfr/constants/core.pyi +17 -0
- tnfr/constants/init.pyi +12 -0
- tnfr/constants/metric.py +4 -12
- tnfr/constants/metric.pyi +19 -0
- tnfr/constants_glyphs.py +9 -91
- tnfr/constants_glyphs.pyi +12 -0
- tnfr/dynamics/__init__.py +112 -634
- tnfr/dynamics/__init__.pyi +83 -0
- tnfr/dynamics/adaptation.py +201 -0
- tnfr/dynamics/aliases.py +22 -0
- tnfr/dynamics/coordination.py +343 -0
- tnfr/dynamics/dnfr.py +1936 -354
- tnfr/dynamics/dnfr.pyi +33 -0
- tnfr/dynamics/integrators.py +369 -75
- tnfr/dynamics/integrators.pyi +35 -0
- tnfr/dynamics/runtime.py +521 -0
- tnfr/dynamics/sampling.py +8 -5
- tnfr/dynamics/sampling.pyi +7 -0
- tnfr/dynamics/selectors.py +680 -0
- tnfr/execution.py +56 -41
- tnfr/execution.pyi +65 -0
- tnfr/flatten.py +7 -7
- tnfr/flatten.pyi +28 -0
- tnfr/gamma.py +54 -37
- tnfr/gamma.pyi +40 -0
- tnfr/glyph_history.py +85 -38
- tnfr/glyph_history.pyi +53 -0
- tnfr/grammar.py +19 -338
- tnfr/grammar.pyi +13 -0
- tnfr/helpers/__init__.py +110 -30
- tnfr/helpers/__init__.pyi +66 -0
- tnfr/helpers/numeric.py +1 -0
- tnfr/helpers/numeric.pyi +12 -0
- tnfr/immutable.py +55 -19
- tnfr/immutable.pyi +37 -0
- tnfr/initialization.py +12 -10
- tnfr/initialization.pyi +73 -0
- tnfr/io.py +99 -34
- tnfr/io.pyi +11 -0
- tnfr/locking.pyi +7 -0
- tnfr/metrics/__init__.pyi +20 -0
- tnfr/metrics/coherence.py +934 -294
- tnfr/metrics/common.py +1 -3
- tnfr/metrics/common.pyi +15 -0
- tnfr/metrics/core.py +192 -34
- tnfr/metrics/core.pyi +13 -0
- tnfr/metrics/diagnosis.py +707 -101
- tnfr/metrics/diagnosis.pyi +89 -0
- tnfr/metrics/export.py +27 -13
- tnfr/metrics/glyph_timing.py +218 -38
- tnfr/metrics/reporting.py +22 -18
- tnfr/metrics/reporting.pyi +12 -0
- tnfr/metrics/sense_index.py +199 -25
- tnfr/metrics/sense_index.pyi +9 -0
- tnfr/metrics/trig.py +53 -18
- tnfr/metrics/trig.pyi +12 -0
- tnfr/metrics/trig_cache.py +3 -7
- tnfr/metrics/trig_cache.pyi +10 -0
- tnfr/node.py +148 -125
- tnfr/node.pyi +161 -0
- tnfr/observers.py +44 -30
- tnfr/observers.pyi +46 -0
- tnfr/ontosim.py +14 -13
- tnfr/ontosim.pyi +33 -0
- tnfr/operators/__init__.py +84 -52
- tnfr/operators/__init__.pyi +31 -0
- tnfr/operators/definitions.py +181 -0
- tnfr/operators/definitions.pyi +92 -0
- tnfr/operators/jitter.py +86 -23
- tnfr/operators/jitter.pyi +11 -0
- tnfr/operators/registry.py +80 -0
- tnfr/operators/registry.pyi +15 -0
- tnfr/operators/remesh.py +141 -57
- tnfr/presets.py +9 -54
- tnfr/presets.pyi +7 -0
- tnfr/py.typed +0 -0
- tnfr/rng.py +259 -73
- tnfr/rng.pyi +14 -0
- tnfr/selector.py +24 -17
- tnfr/selector.pyi +19 -0
- tnfr/sense.py +55 -43
- tnfr/sense.pyi +30 -0
- tnfr/structural.py +44 -267
- tnfr/structural.pyi +46 -0
- tnfr/telemetry/__init__.py +13 -0
- tnfr/telemetry/verbosity.py +37 -0
- tnfr/tokens.py +3 -2
- tnfr/tokens.pyi +41 -0
- tnfr/trace.py +272 -82
- tnfr/trace.pyi +68 -0
- tnfr/types.py +345 -6
- tnfr/types.pyi +145 -0
- tnfr/utils/__init__.py +158 -0
- tnfr/utils/__init__.pyi +133 -0
- tnfr/utils/cache.py +755 -0
- tnfr/utils/cache.pyi +156 -0
- tnfr/{collections_utils.py → utils/data.py} +57 -90
- tnfr/utils/data.pyi +73 -0
- tnfr/utils/graph.py +87 -0
- tnfr/utils/graph.pyi +10 -0
- tnfr/utils/init.py +746 -0
- tnfr/utils/init.pyi +85 -0
- tnfr/{json_utils.py → utils/io.py} +13 -18
- tnfr/utils/io.pyi +10 -0
- tnfr/utils/validators.py +130 -0
- tnfr/utils/validators.pyi +19 -0
- tnfr/validation/__init__.py +25 -0
- tnfr/validation/__init__.pyi +17 -0
- tnfr/validation/compatibility.py +59 -0
- tnfr/validation/compatibility.pyi +8 -0
- tnfr/validation/grammar.py +149 -0
- tnfr/validation/grammar.pyi +11 -0
- tnfr/validation/rules.py +194 -0
- tnfr/validation/rules.pyi +18 -0
- tnfr/validation/syntax.py +151 -0
- tnfr/validation/syntax.pyi +7 -0
- tnfr-6.0.0.dist-info/METADATA +135 -0
- tnfr-6.0.0.dist-info/RECORD +157 -0
- tnfr/graph_utils.py +0 -84
- tnfr/import_utils.py +0 -228
- tnfr/logging_utils.py +0 -116
- tnfr/validators.py +0 -84
- tnfr/value_utils.py +0 -59
- tnfr-4.5.2.dist-info/METADATA +0 -379
- tnfr-4.5.2.dist-info/RECORD +0 -67
- {tnfr-4.5.2.dist-info → tnfr-6.0.0.dist-info}/WHEEL +0 -0
- {tnfr-4.5.2.dist-info → tnfr-6.0.0.dist-info}/entry_points.txt +0 -0
- {tnfr-4.5.2.dist-info → tnfr-6.0.0.dist-info}/licenses/LICENSE.md +0 -0
- {tnfr-4.5.2.dist-info → tnfr-6.0.0.dist-info}/top_level.txt +0 -0
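Most of the churn in the list above comes from folding the flat 4.5.2 helper modules into packages: `tnfr/config.py` becomes `tnfr/config/init.py`, `tnfr/collections_utils.py` becomes `tnfr/utils/data.py`, `tnfr/json_utils.py` becomes `tnfr/utils/io.py`, and `graph_utils.py`, `import_utils.py`, `logging_utils.py`, `validators.py` and `value_utils.py` are replaced by the new `tnfr/utils/` and `tnfr/validation/` packages. A minimal sketch of how downstream code might bridge the two layouts is shown below; only the module paths come from the file list, and the 4.5.2-side import location of the symbol is an assumption for illustration.

```python
# Hypothetical compatibility shim for callers written against the 4.5.2 layout.
# The 6.0.0 module path and the normalize_weights export are visible in the
# dnfr.py diff below; the 4.5.2 fallback location is an assumption.
try:
    from tnfr.utils import normalize_weights  # 6.0.0 package layout
except ImportError:  # pragma: no cover - running against tnfr 4.5.2
    from tnfr.collections_utils import normalize_weights  # type: ignore[no-redef]
```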
tnfr/dynamics/dnfr.py
CHANGED
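The rewritten module gates its NumPy fast paths on two graph-level flags that appear throughout the hunks below: `vectorized_dnfr` (read by the new `_should_vectorize` helper) and `dnfr_force_dense` (read by `_prepare_dnfr_data`). A minimal usage sketch, assuming a NetworkX-style graph whose `.graph` dict carries TNFR settings:

```python
import networkx as nx

G = nx.Graph()
# Both flags are read from the graph-level attribute dict in the hunks below.
# Treating them as plain booleans here is an assumption for illustration only.
G.graph["vectorized_dnfr"] = False   # skip the NumPy path even when NumPy is installed
G.graph["dnfr_force_dense"] = True   # prefer the dense adjacency accumulation
```

The non-vectorised fallback added to `_apply_dnfr_gradients` splits the node range into contiguous chunks, fans them out to a `ProcessPoolExecutor`, and reassembles the partial results by their start offsets. A self-contained sketch of that pattern, with a toy worker standing in for `_dnfr_gradients_worker`:

```python
from concurrent.futures import ProcessPoolExecutor


def iter_chunk_offsets(total: int, jobs: int):
    """Yield (start, end) offsets splitting ``total`` items across ``jobs`` workers."""
    jobs = max(1, min(int(jobs), total))
    base, extra = divmod(total, jobs)
    start = 0
    for i in range(jobs):
        size = base + (1 if i < extra else 0)
        if size:
            yield start, start + size
            start += size


def toy_worker(start: int, end: int, values: list[float]) -> tuple[int, list[float]]:
    # Stand-in for _dnfr_gradients_worker: return the chunk offset plus its results.
    return start, [v * v for v in values[start:end]]


if __name__ == "__main__":
    values = [float(i) for i in range(10)]
    out = [0.0] * len(values)
    with ProcessPoolExecutor(max_workers=2) as executor:
        futures = [
            executor.submit(toy_worker, s, e, values)
            for s, e in iter_chunk_offsets(len(values), 2)
        ]
        for future in futures:
            start, chunk = future.result()
            out[start:start + len(chunk)] = chunk
    print(out)
```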
|
@@ -2,32 +2,59 @@
|
|
|
2
2
|
|
|
3
3
|
This module provides helper functions to configure, cache and apply ΔNFR
|
|
4
4
|
components such as phase, epidemiological state and vortex fields during
|
|
5
|
-
simulations.
|
|
5
|
+
simulations. The neighbour accumulation helpers reuse cached edge indices
|
|
6
|
+
and NumPy workspaces whenever available so cosine, sine, EPI, νf and topology
|
|
7
|
+
means remain faithful to the canonical ΔNFR reorganisation without redundant
|
|
8
|
+
allocations.
|
|
6
9
|
"""
|
|
7
10
|
|
|
8
11
|
from __future__ import annotations
|
|
9
12
|
|
|
10
13
|
import math
|
|
14
|
+
import sys
|
|
15
|
+
from concurrent.futures import ProcessPoolExecutor
|
|
11
16
|
from dataclasses import dataclass
|
|
12
|
-
from
|
|
17
|
+
from types import ModuleType
|
|
18
|
+
from collections.abc import Callable, Iterator, Mapping, MutableMapping, Sequence
|
|
19
|
+
from typing import TYPE_CHECKING, Any, cast
|
|
13
20
|
|
|
14
|
-
from ..
|
|
21
|
+
from ..alias import get_attr, get_theta_attr, set_dnfr
|
|
15
22
|
from ..constants import DEFAULTS, get_aliases, get_param
|
|
16
|
-
from ..cache import cached_nodes_and_A
|
|
17
23
|
from ..helpers.numeric import angle_diff
|
|
18
|
-
from ..
|
|
19
|
-
from ..alias import (
|
|
20
|
-
get_attr,
|
|
21
|
-
set_dnfr,
|
|
22
|
-
)
|
|
23
|
-
from ..metrics.trig_cache import compute_theta_trig
|
|
24
|
+
from ..cache import CacheManager
|
|
24
25
|
from ..metrics.common import merge_and_normalize_weights
|
|
25
|
-
from ..
|
|
26
|
-
|
|
26
|
+
from ..metrics.trig import neighbor_phase_mean_list
|
|
27
|
+
from ..metrics.trig_cache import compute_theta_trig
|
|
28
|
+
from ..utils import cached_node_list, cached_nodes_and_A, get_numpy, normalize_weights
|
|
29
|
+
from ..utils.cache import DNFR_PREP_STATE_KEY, DnfrPrepState, _graph_cache_manager
|
|
30
|
+
from ..types import (
|
|
31
|
+
DeltaNFRHook,
|
|
32
|
+
DnfrCacheVectors,
|
|
33
|
+
DnfrVectorMap,
|
|
34
|
+
NeighborStats,
|
|
35
|
+
NodeId,
|
|
36
|
+
TNFRGraph,
|
|
37
|
+
)
|
|
38
|
+
|
|
39
|
+
if TYPE_CHECKING: # pragma: no cover - import-time typing hook
|
|
40
|
+
import numpy as np
|
|
27
41
|
ALIAS_EPI = get_aliases("EPI")
|
|
28
42
|
ALIAS_VF = get_aliases("VF")
|
|
29
43
|
|
|
30
44
|
|
|
45
|
+
_MEAN_VECTOR_EPS = 1e-12
|
|
46
|
+
_SPARSE_DENSITY_THRESHOLD = 0.25
|
|
47
|
+
|
|
48
|
+
|
|
49
|
+
def _should_vectorize(G: TNFRGraph, np_module: ModuleType | None) -> bool:
|
|
50
|
+
"""Return ``True`` when NumPy is available unless the graph disables it."""
|
|
51
|
+
|
|
52
|
+
if np_module is None:
|
|
53
|
+
return False
|
|
54
|
+
flag = G.graph.get("vectorized_dnfr")
|
|
55
|
+
if flag is None:
|
|
56
|
+
return True
|
|
57
|
+
return bool(flag)
|
|
31
58
|
|
|
32
59
|
|
|
33
60
|
@dataclass
|
|
@@ -38,6 +65,16 @@ class DnfrCache:
|
|
|
38
65
|
vf: list[float]
|
|
39
66
|
cos_theta: list[float]
|
|
40
67
|
sin_theta: list[float]
|
|
68
|
+
neighbor_x: list[float]
|
|
69
|
+
neighbor_y: list[float]
|
|
70
|
+
neighbor_epi_sum: list[float]
|
|
71
|
+
neighbor_vf_sum: list[float]
|
|
72
|
+
neighbor_count: list[float]
|
|
73
|
+
neighbor_deg_sum: list[float] | None
|
|
74
|
+
th_bar: list[float] | None = None
|
|
75
|
+
epi_bar: list[float] | None = None
|
|
76
|
+
vf_bar: list[float] | None = None
|
|
77
|
+
deg_bar: list[float] | None = None
|
|
41
78
|
degs: dict[Any, float] | None = None
|
|
42
79
|
deg_list: list[float] | None = None
|
|
43
80
|
theta_np: Any | None = None
|
|
@@ -46,7 +83,221 @@ class DnfrCache:
|
|
|
46
83
|
cos_theta_np: Any | None = None
|
|
47
84
|
sin_theta_np: Any | None = None
|
|
48
85
|
deg_array: Any | None = None
|
|
86
|
+
edge_src: Any | None = None
|
|
87
|
+
edge_dst: Any | None = None
|
|
49
88
|
checksum: Any | None = None
|
|
89
|
+
neighbor_x_np: Any | None = None
|
|
90
|
+
neighbor_y_np: Any | None = None
|
|
91
|
+
neighbor_epi_sum_np: Any | None = None
|
|
92
|
+
neighbor_vf_sum_np: Any | None = None
|
|
93
|
+
neighbor_count_np: Any | None = None
|
|
94
|
+
neighbor_deg_sum_np: Any | None = None
|
|
95
|
+
th_bar_np: Any | None = None
|
|
96
|
+
epi_bar_np: Any | None = None
|
|
97
|
+
vf_bar_np: Any | None = None
|
|
98
|
+
deg_bar_np: Any | None = None
|
|
99
|
+
grad_phase_np: Any | None = None
|
|
100
|
+
grad_epi_np: Any | None = None
|
|
101
|
+
grad_vf_np: Any | None = None
|
|
102
|
+
grad_topo_np: Any | None = None
|
|
103
|
+
grad_total_np: Any | None = None
|
|
104
|
+
dense_components_np: Any | None = None
|
|
105
|
+
dense_accum_np: Any | None = None
|
|
106
|
+
dense_degree_np: Any | None = None
|
|
107
|
+
neighbor_accum_np: Any | None = None
|
|
108
|
+
neighbor_inv_count_np: Any | None = None
|
|
109
|
+
neighbor_cos_avg_np: Any | None = None
|
|
110
|
+
neighbor_sin_avg_np: Any | None = None
|
|
111
|
+
neighbor_mean_tmp_np: Any | None = None
|
|
112
|
+
neighbor_mean_length_np: Any | None = None
|
|
113
|
+
edge_signature: Any | None = None
|
|
114
|
+
neighbor_accum_signature: Any | None = None
|
|
115
|
+
neighbor_edge_values_np: Any | None = None
|
|
116
|
+
|
|
117
|
+
|
|
118
|
+
_NUMPY_CACHE_ATTRS = (
|
|
119
|
+
"theta_np",
|
|
120
|
+
"epi_np",
|
|
121
|
+
"vf_np",
|
|
122
|
+
"cos_theta_np",
|
|
123
|
+
"sin_theta_np",
|
|
124
|
+
"deg_array",
|
|
125
|
+
"neighbor_x_np",
|
|
126
|
+
"neighbor_y_np",
|
|
127
|
+
"neighbor_epi_sum_np",
|
|
128
|
+
"neighbor_vf_sum_np",
|
|
129
|
+
"neighbor_count_np",
|
|
130
|
+
"neighbor_deg_sum_np",
|
|
131
|
+
"neighbor_inv_count_np",
|
|
132
|
+
"neighbor_cos_avg_np",
|
|
133
|
+
"neighbor_sin_avg_np",
|
|
134
|
+
"neighbor_mean_tmp_np",
|
|
135
|
+
"neighbor_mean_length_np",
|
|
136
|
+
"neighbor_accum_np",
|
|
137
|
+
"neighbor_edge_values_np",
|
|
138
|
+
"dense_components_np",
|
|
139
|
+
"dense_accum_np",
|
|
140
|
+
"dense_degree_np",
|
|
141
|
+
)
|
|
142
|
+
|
|
143
|
+
|
|
144
|
+
def _iter_chunk_offsets(total: int, jobs: int) -> Iterator[tuple[int, int]]:
|
|
145
|
+
"""Yield ``(start, end)`` offsets splitting ``total`` items across ``jobs``."""
|
|
146
|
+
|
|
147
|
+
if total <= 0 or jobs <= 1:
|
|
148
|
+
return
|
|
149
|
+
|
|
150
|
+
jobs = max(1, min(int(jobs), total))
|
|
151
|
+
base, extra = divmod(total, jobs)
|
|
152
|
+
start = 0
|
|
153
|
+
for i in range(jobs):
|
|
154
|
+
size = base + (1 if i < extra else 0)
|
|
155
|
+
if size <= 0:
|
|
156
|
+
continue
|
|
157
|
+
end = start + size
|
|
158
|
+
yield start, end
|
|
159
|
+
start = end
|
|
160
|
+
|
|
161
|
+
|
|
162
|
+
def _neighbor_sums_worker(
|
|
163
|
+
start: int,
|
|
164
|
+
end: int,
|
|
165
|
+
neighbor_indices: Sequence[Sequence[int]],
|
|
166
|
+
cos_th: Sequence[float],
|
|
167
|
+
sin_th: Sequence[float],
|
|
168
|
+
epi: Sequence[float],
|
|
169
|
+
vf: Sequence[float],
|
|
170
|
+
x_base: Sequence[float],
|
|
171
|
+
y_base: Sequence[float],
|
|
172
|
+
epi_base: Sequence[float],
|
|
173
|
+
vf_base: Sequence[float],
|
|
174
|
+
count_base: Sequence[float],
|
|
175
|
+
deg_base: Sequence[float] | None,
|
|
176
|
+
deg_list: Sequence[float] | None,
|
|
177
|
+
degs_list: Sequence[float] | None,
|
|
178
|
+
) -> tuple[int, list[float], list[float], list[float], list[float], list[float], list[float] | None]:
|
|
179
|
+
"""Return partial neighbour sums for the ``[start, end)`` range."""
|
|
180
|
+
|
|
181
|
+
chunk_x: list[float] = []
|
|
182
|
+
chunk_y: list[float] = []
|
|
183
|
+
chunk_epi: list[float] = []
|
|
184
|
+
chunk_vf: list[float] = []
|
|
185
|
+
chunk_count: list[float] = []
|
|
186
|
+
chunk_deg: list[float] | None = [] if deg_base is not None else None
|
|
187
|
+
|
|
188
|
+
for offset, idx in enumerate(range(start, end)):
|
|
189
|
+
neighbors = neighbor_indices[idx]
|
|
190
|
+
x_i = float(x_base[offset])
|
|
191
|
+
y_i = float(y_base[offset])
|
|
192
|
+
epi_i = float(epi_base[offset])
|
|
193
|
+
vf_i = float(vf_base[offset])
|
|
194
|
+
count_i = float(count_base[offset])
|
|
195
|
+
if deg_base is not None and chunk_deg is not None:
|
|
196
|
+
deg_i_acc = float(deg_base[offset])
|
|
197
|
+
else:
|
|
198
|
+
deg_i_acc = 0.0
|
|
199
|
+
deg_i = float(degs_list[idx]) if degs_list is not None else 0.0
|
|
200
|
+
|
|
201
|
+
for neighbor_idx in neighbors:
|
|
202
|
+
x_i += float(cos_th[neighbor_idx])
|
|
203
|
+
y_i += float(sin_th[neighbor_idx])
|
|
204
|
+
epi_i += float(epi[neighbor_idx])
|
|
205
|
+
vf_i += float(vf[neighbor_idx])
|
|
206
|
+
count_i += 1.0
|
|
207
|
+
if chunk_deg is not None:
|
|
208
|
+
if deg_list is not None:
|
|
209
|
+
deg_i_acc += float(deg_list[neighbor_idx])
|
|
210
|
+
else:
|
|
211
|
+
deg_i_acc += deg_i
|
|
212
|
+
|
|
213
|
+
chunk_x.append(x_i)
|
|
214
|
+
chunk_y.append(y_i)
|
|
215
|
+
chunk_epi.append(epi_i)
|
|
216
|
+
chunk_vf.append(vf_i)
|
|
217
|
+
chunk_count.append(count_i)
|
|
218
|
+
if chunk_deg is not None:
|
|
219
|
+
chunk_deg.append(deg_i_acc)
|
|
220
|
+
|
|
221
|
+
return (
|
|
222
|
+
start,
|
|
223
|
+
chunk_x,
|
|
224
|
+
chunk_y,
|
|
225
|
+
chunk_epi,
|
|
226
|
+
chunk_vf,
|
|
227
|
+
chunk_count,
|
|
228
|
+
chunk_deg,
|
|
229
|
+
)
|
|
230
|
+
|
|
231
|
+
|
|
232
|
+
def _dnfr_gradients_worker(
|
|
233
|
+
start: int,
|
|
234
|
+
end: int,
|
|
235
|
+
nodes: Sequence[NodeId],
|
|
236
|
+
theta: list[float],
|
|
237
|
+
epi: list[float],
|
|
238
|
+
vf: list[float],
|
|
239
|
+
th_bar: list[float],
|
|
240
|
+
epi_bar: list[float],
|
|
241
|
+
vf_bar: list[float],
|
|
242
|
+
deg_bar: list[float] | None,
|
|
243
|
+
degs: Mapping[Any, float] | Sequence[float] | None,
|
|
244
|
+
w_phase: float,
|
|
245
|
+
w_epi: float,
|
|
246
|
+
w_vf: float,
|
|
247
|
+
w_topo: float,
|
|
248
|
+
) -> tuple[int, list[float]]:
|
|
249
|
+
"""Return partial ΔNFR gradients for the ``[start, end)`` range."""
|
|
250
|
+
|
|
251
|
+
chunk: list[float] = []
|
|
252
|
+
for idx in range(start, end):
|
|
253
|
+
n = nodes[idx]
|
|
254
|
+
g_phase = -angle_diff(theta[idx], th_bar[idx]) / math.pi
|
|
255
|
+
g_epi = epi_bar[idx] - epi[idx]
|
|
256
|
+
g_vf = vf_bar[idx] - vf[idx]
|
|
257
|
+
if w_topo != 0.0 and deg_bar is not None and degs is not None:
|
|
258
|
+
if isinstance(degs, dict):
|
|
259
|
+
deg_i = float(degs.get(n, 0))
|
|
260
|
+
else:
|
|
261
|
+
deg_i = float(degs[idx])
|
|
262
|
+
g_topo = deg_bar[idx] - deg_i
|
|
263
|
+
else:
|
|
264
|
+
g_topo = 0.0
|
|
265
|
+
chunk.append(w_phase * g_phase + w_epi * g_epi + w_vf * g_vf + w_topo * g_topo)
|
|
266
|
+
return start, chunk
|
|
267
|
+
|
|
268
|
+
|
|
269
|
+
def _resolve_parallel_jobs(n_jobs: int | None, total: int) -> int | None:
|
|
270
|
+
"""Return an effective worker count for ``total`` items or ``None``."""
|
|
271
|
+
|
|
272
|
+
if n_jobs is None:
|
|
273
|
+
return None
|
|
274
|
+
try:
|
|
275
|
+
jobs = int(n_jobs)
|
|
276
|
+
except (TypeError, ValueError):
|
|
277
|
+
return None
|
|
278
|
+
if jobs <= 1 or total <= 1:
|
|
279
|
+
return None
|
|
280
|
+
return max(1, min(jobs, total))
|
|
281
|
+
|
|
282
|
+
|
|
283
|
+
def _is_numpy_like(obj) -> bool:
|
|
284
|
+
return getattr(obj, "dtype", None) is not None and getattr(obj, "shape", None) is not None
|
|
285
|
+
|
|
286
|
+
|
|
287
|
+
def _has_cached_numpy_buffers(data: dict, cache: DnfrCache | None) -> bool:
|
|
288
|
+
for attr in _NUMPY_CACHE_ATTRS:
|
|
289
|
+
arr = data.get(attr)
|
|
290
|
+
if _is_numpy_like(arr):
|
|
291
|
+
return True
|
|
292
|
+
if cache is not None:
|
|
293
|
+
for attr in _NUMPY_CACHE_ATTRS:
|
|
294
|
+
arr = getattr(cache, attr, None)
|
|
295
|
+
if _is_numpy_like(arr):
|
|
296
|
+
return True
|
|
297
|
+
A = data.get("A")
|
|
298
|
+
if _is_numpy_like(A):
|
|
299
|
+
return True
|
|
300
|
+
return False
|
|
50
301
|
|
|
51
302
|
|
|
52
303
|
__all__ = (
|
|
@@ -93,37 +344,145 @@ def _configure_dnfr_weights(G) -> dict:
|
|
|
93
344
|
return weights
|
|
94
345
|
|
|
95
346
|
|
|
96
|
-
def _init_dnfr_cache(
|
|
97
|
-
|
|
98
|
-
|
|
347
|
+
def _init_dnfr_cache(
|
|
348
|
+
G: TNFRGraph,
|
|
349
|
+
nodes: Sequence[NodeId],
|
|
350
|
+
cache_or_manager: CacheManager | DnfrCache | None = None,
|
|
351
|
+
checksum: Any | None = None,
|
|
352
|
+
force_refresh: bool = False,
|
|
353
|
+
*,
|
|
354
|
+
manager: CacheManager | None = None,
|
|
355
|
+
) -> tuple[
|
|
356
|
+
DnfrCache,
|
|
357
|
+
dict[NodeId, int],
|
|
358
|
+
list[float],
|
|
359
|
+
list[float],
|
|
360
|
+
list[float],
|
|
361
|
+
list[float],
|
|
362
|
+
list[float],
|
|
363
|
+
bool,
|
|
364
|
+
]:
|
|
365
|
+
"""Initialise or reuse cached ΔNFR arrays.
|
|
366
|
+
|
|
367
|
+
``manager`` telemetry became mandatory in TNFR 9.0 to expose cache hits,
|
|
368
|
+
misses and timings. Older callers still pass a ``cache`` instance as the
|
|
369
|
+
third positional argument; this helper supports both signatures by seeding
|
|
370
|
+
the manager-backed state with the provided cache when necessary.
|
|
371
|
+
"""
|
|
372
|
+
|
|
373
|
+
if manager is None and isinstance(cache_or_manager, CacheManager):
|
|
374
|
+
manager = cache_or_manager
|
|
375
|
+
cache_or_manager = None
|
|
376
|
+
|
|
377
|
+
if manager is None:
|
|
378
|
+
manager = _graph_cache_manager(G.graph)
|
|
379
|
+
|
|
380
|
+
graph = G.graph
|
|
381
|
+
state = manager.get(DNFR_PREP_STATE_KEY)
|
|
382
|
+
if not isinstance(state, DnfrPrepState):
|
|
383
|
+
manager.clear(DNFR_PREP_STATE_KEY)
|
|
384
|
+
state = manager.get(DNFR_PREP_STATE_KEY)
|
|
385
|
+
|
|
386
|
+
if isinstance(cache_or_manager, DnfrCache):
|
|
387
|
+
state.cache = cache_or_manager
|
|
388
|
+
if checksum is None:
|
|
389
|
+
checksum = cache_or_manager.checksum
|
|
390
|
+
|
|
391
|
+
cache = state.cache
|
|
392
|
+
reuse = (
|
|
393
|
+
not force_refresh
|
|
394
|
+
and isinstance(cache, DnfrCache)
|
|
395
|
+
and cache.checksum == checksum
|
|
396
|
+
and len(cache.theta) == len(nodes)
|
|
397
|
+
)
|
|
398
|
+
if reuse:
|
|
399
|
+
manager.increment_hit(DNFR_PREP_STATE_KEY)
|
|
400
|
+
graph["_dnfr_prep_cache"] = cache
|
|
99
401
|
return (
|
|
100
|
-
|
|
101
|
-
|
|
102
|
-
|
|
103
|
-
|
|
104
|
-
|
|
105
|
-
|
|
106
|
-
|
|
402
|
+
cache,
|
|
403
|
+
cache.idx,
|
|
404
|
+
cache.theta,
|
|
405
|
+
cache.epi,
|
|
406
|
+
cache.vf,
|
|
407
|
+
cache.cos_theta,
|
|
408
|
+
cache.sin_theta,
|
|
107
409
|
False,
|
|
108
410
|
)
|
|
109
411
|
|
|
110
|
-
|
|
111
|
-
|
|
112
|
-
|
|
113
|
-
|
|
114
|
-
|
|
115
|
-
|
|
116
|
-
|
|
117
|
-
|
|
118
|
-
|
|
119
|
-
|
|
120
|
-
|
|
121
|
-
|
|
122
|
-
|
|
123
|
-
|
|
124
|
-
|
|
125
|
-
|
|
126
|
-
|
|
412
|
+
def _rebuild(current: DnfrPrepState | Any) -> DnfrPrepState:
|
|
413
|
+
if not isinstance(current, DnfrPrepState):
|
|
414
|
+
raise RuntimeError("ΔNFR prep state unavailable during rebuild")
|
|
415
|
+
prev_cache = current.cache if isinstance(current.cache, DnfrCache) else None
|
|
416
|
+
idx_local = {n: i for i, n in enumerate(nodes)}
|
|
417
|
+
size = len(nodes)
|
|
418
|
+
zeros = [0.0] * size
|
|
419
|
+
if prev_cache is None:
|
|
420
|
+
cache_new = DnfrCache(
|
|
421
|
+
idx=idx_local,
|
|
422
|
+
theta=zeros.copy(),
|
|
423
|
+
epi=zeros.copy(),
|
|
424
|
+
vf=zeros.copy(),
|
|
425
|
+
cos_theta=[1.0] * size,
|
|
426
|
+
sin_theta=[0.0] * size,
|
|
427
|
+
neighbor_x=zeros.copy(),
|
|
428
|
+
neighbor_y=zeros.copy(),
|
|
429
|
+
neighbor_epi_sum=zeros.copy(),
|
|
430
|
+
neighbor_vf_sum=zeros.copy(),
|
|
431
|
+
neighbor_count=zeros.copy(),
|
|
432
|
+
neighbor_deg_sum=zeros.copy() if size else [],
|
|
433
|
+
degs=None,
|
|
434
|
+
edge_src=None,
|
|
435
|
+
edge_dst=None,
|
|
436
|
+
checksum=checksum,
|
|
437
|
+
)
|
|
438
|
+
else:
|
|
439
|
+
cache_new = prev_cache
|
|
440
|
+
cache_new.idx = idx_local
|
|
441
|
+
cache_new.theta = zeros.copy()
|
|
442
|
+
cache_new.epi = zeros.copy()
|
|
443
|
+
cache_new.vf = zeros.copy()
|
|
444
|
+
cache_new.cos_theta = [1.0] * size
|
|
445
|
+
cache_new.sin_theta = [0.0] * size
|
|
446
|
+
cache_new.neighbor_x = zeros.copy()
|
|
447
|
+
cache_new.neighbor_y = zeros.copy()
|
|
448
|
+
cache_new.neighbor_epi_sum = zeros.copy()
|
|
449
|
+
cache_new.neighbor_vf_sum = zeros.copy()
|
|
450
|
+
cache_new.neighbor_count = zeros.copy()
|
|
451
|
+
cache_new.neighbor_deg_sum = zeros.copy() if size else []
|
|
452
|
+
|
|
453
|
+
# Reset any numpy mirrors or aggregated buffers to avoid leaking
|
|
454
|
+
# state across refresh cycles (e.g. switching between vectorised
|
|
455
|
+
# and Python paths or reusing legacy caches).
|
|
456
|
+
for attr in _NUMPY_CACHE_ATTRS:
|
|
457
|
+
setattr(cache_new, attr, None)
|
|
458
|
+
for attr in (
|
|
459
|
+
"th_bar_np",
|
|
460
|
+
"epi_bar_np",
|
|
461
|
+
"vf_bar_np",
|
|
462
|
+
"deg_bar_np",
|
|
463
|
+
"grad_phase_np",
|
|
464
|
+
"grad_epi_np",
|
|
465
|
+
"grad_vf_np",
|
|
466
|
+
"grad_topo_np",
|
|
467
|
+
"grad_total_np",
|
|
468
|
+
):
|
|
469
|
+
setattr(cache_new, attr, None)
|
|
470
|
+
cache_new.edge_src = None
|
|
471
|
+
cache_new.edge_dst = None
|
|
472
|
+
cache_new.edge_signature = None
|
|
473
|
+
cache_new.neighbor_accum_signature = None
|
|
474
|
+
cache_new.degs = prev_cache.degs if prev_cache else None
|
|
475
|
+
cache_new.checksum = checksum
|
|
476
|
+
current.cache = cache_new
|
|
477
|
+
graph["_dnfr_prep_cache"] = cache_new
|
|
478
|
+
return current
|
|
479
|
+
|
|
480
|
+
with manager.timer(DNFR_PREP_STATE_KEY):
|
|
481
|
+
state = manager.update(DNFR_PREP_STATE_KEY, _rebuild)
|
|
482
|
+
manager.increment_miss(DNFR_PREP_STATE_KEY)
|
|
483
|
+
cache = state.cache
|
|
484
|
+
if not isinstance(cache, DnfrCache): # pragma: no cover - defensive guard
|
|
485
|
+
raise RuntimeError("ΔNFR cache initialisation failed")
|
|
127
486
|
return (
|
|
128
487
|
cache,
|
|
129
488
|
cache.idx,
|
|
@@ -136,7 +495,7 @@ def _init_dnfr_cache(G, nodes, prev_cache: DnfrCache | None, checksum, dirty):
|
|
|
136
495
|
)
|
|
137
496
|
|
|
138
497
|
|
|
139
|
-
def _ensure_numpy_vectors(cache: DnfrCache, np):
|
|
498
|
+
def _ensure_numpy_vectors(cache: DnfrCache, np: ModuleType) -> DnfrCacheVectors:
|
|
140
499
|
"""Ensure NumPy copies of cached vectors are initialised and up to date."""
|
|
141
500
|
|
|
142
501
|
if cache is None:
|
|
@@ -165,13 +524,19 @@ def _ensure_numpy_vectors(cache: DnfrCache, np):
|
|
|
165
524
|
return tuple(arrays)
|
|
166
525
|
|
|
167
526
|
|
|
168
|
-
def _ensure_numpy_degrees(
|
|
527
|
+
def _ensure_numpy_degrees(
|
|
528
|
+
cache: DnfrCache,
|
|
529
|
+
deg_list: Sequence[float] | None,
|
|
530
|
+
np: ModuleType,
|
|
531
|
+
) -> np.ndarray | None:
|
|
169
532
|
"""Initialise/update NumPy array mirroring ``deg_list``."""
|
|
170
533
|
|
|
171
|
-
if
|
|
534
|
+
if deg_list is None:
|
|
172
535
|
if cache is not None:
|
|
173
536
|
cache.deg_array = None
|
|
174
537
|
return None
|
|
538
|
+
if cache is None:
|
|
539
|
+
return np.array(deg_list, dtype=float)
|
|
175
540
|
arr = cache.deg_array
|
|
176
541
|
if arr is None or len(arr) != len(deg_list):
|
|
177
542
|
arr = np.array(deg_list, dtype=float)
|
|
@@ -181,20 +546,150 @@ def _ensure_numpy_degrees(cache: DnfrCache, deg_list, np):
|
|
|
181
546
|
return arr
|
|
182
547
|
|
|
183
548
|
|
|
184
|
-
def
|
|
549
|
+
def _resolve_numpy_degree_array(
|
|
550
|
+
data: MutableMapping[str, Any],
|
|
551
|
+
count: np.ndarray | None,
|
|
552
|
+
*,
|
|
553
|
+
cache: DnfrCache | None,
|
|
554
|
+
np: ModuleType,
|
|
555
|
+
) -> np.ndarray | None:
|
|
556
|
+
"""Return the vector of node degrees required for topology gradients."""
|
|
557
|
+
|
|
558
|
+
if data["w_topo"] == 0.0:
|
|
559
|
+
return None
|
|
560
|
+
deg_array = data.get("deg_array")
|
|
561
|
+
if deg_array is not None:
|
|
562
|
+
return deg_array
|
|
563
|
+
deg_list = data.get("deg_list")
|
|
564
|
+
if deg_list is not None:
|
|
565
|
+
deg_array = np.array(deg_list, dtype=float)
|
|
566
|
+
data["deg_array"] = deg_array
|
|
567
|
+
if cache is not None:
|
|
568
|
+
cache.deg_array = deg_array
|
|
569
|
+
return deg_array
|
|
570
|
+
return count
|
|
571
|
+
|
|
572
|
+
|
|
573
|
+
def _ensure_cached_array(
|
|
574
|
+
cache: DnfrCache | None,
|
|
575
|
+
attr: str,
|
|
576
|
+
shape: tuple[int, ...],
|
|
577
|
+
np: ModuleType,
|
|
578
|
+
) -> np.ndarray:
|
|
579
|
+
"""Return a cached NumPy buffer with ``shape`` creating/reusing it."""
|
|
580
|
+
|
|
581
|
+
if np is None:
|
|
582
|
+
raise RuntimeError("NumPy is required to build cached arrays")
|
|
583
|
+
arr = getattr(cache, attr) if cache is not None else None
|
|
584
|
+
if arr is None or getattr(arr, "shape", None) != shape:
|
|
585
|
+
arr = np.empty(shape, dtype=float)
|
|
586
|
+
if cache is not None:
|
|
587
|
+
setattr(cache, attr, arr)
|
|
588
|
+
return arr
|
|
589
|
+
|
|
590
|
+
|
|
591
|
+
def _ensure_numpy_state_vectors(data: MutableMapping[str, Any], np: ModuleType) -> DnfrVectorMap:
|
|
592
|
+
"""Synchronise list-based state vectors with their NumPy counterparts."""
|
|
593
|
+
|
|
594
|
+
nodes = data.get("nodes") or ()
|
|
595
|
+
size = len(nodes)
|
|
596
|
+
cache: DnfrCache | None = data.get("cache")
|
|
597
|
+
|
|
598
|
+
if cache is not None:
|
|
599
|
+
theta_np, epi_np, vf_np, cos_np, sin_np = _ensure_numpy_vectors(cache, np)
|
|
600
|
+
for key, arr in (
|
|
601
|
+
("theta_np", theta_np),
|
|
602
|
+
("epi_np", epi_np),
|
|
603
|
+
("vf_np", vf_np),
|
|
604
|
+
("cos_theta_np", cos_np),
|
|
605
|
+
("sin_theta_np", sin_np),
|
|
606
|
+
):
|
|
607
|
+
if arr is not None and getattr(arr, "shape", None) == (size,):
|
|
608
|
+
data[key] = arr
|
|
609
|
+
|
|
610
|
+
mapping = (
|
|
611
|
+
("theta_np", "theta"),
|
|
612
|
+
("epi_np", "epi"),
|
|
613
|
+
("vf_np", "vf"),
|
|
614
|
+
("cos_theta_np", "cos_theta"),
|
|
615
|
+
("sin_theta_np", "sin_theta"),
|
|
616
|
+
)
|
|
617
|
+
for np_key, src_key in mapping:
|
|
618
|
+
src = data.get(src_key)
|
|
619
|
+
if src is None:
|
|
620
|
+
continue
|
|
621
|
+
arr = data.get(np_key)
|
|
622
|
+
if arr is None or getattr(arr, "shape", None) != (size,):
|
|
623
|
+
arr = np.array(src, dtype=float)
|
|
624
|
+
elif cache is None:
|
|
625
|
+
np.copyto(arr, src, casting="unsafe")
|
|
626
|
+
data[np_key] = arr
|
|
627
|
+
if cache is not None:
|
|
628
|
+
setattr(cache, np_key, arr)
|
|
629
|
+
|
|
630
|
+
return {
|
|
631
|
+
"theta": data.get("theta_np"),
|
|
632
|
+
"epi": data.get("epi_np"),
|
|
633
|
+
"vf": data.get("vf_np"),
|
|
634
|
+
"cos": data.get("cos_theta_np"),
|
|
635
|
+
"sin": data.get("sin_theta_np"),
|
|
636
|
+
}
|
|
637
|
+
|
|
638
|
+
|
|
639
|
+
def _build_edge_index_arrays(
|
|
640
|
+
G: TNFRGraph,
|
|
641
|
+
nodes: Sequence[NodeId],
|
|
642
|
+
idx: Mapping[NodeId, int],
|
|
643
|
+
np: ModuleType,
|
|
644
|
+
) -> tuple[np.ndarray, np.ndarray]:
|
|
645
|
+
"""Create (src, dst) index arrays for ``G`` respecting ``nodes`` order."""
|
|
646
|
+
|
|
647
|
+
if np is None:
|
|
648
|
+
return None, None
|
|
649
|
+
if not nodes:
|
|
650
|
+
empty = np.empty(0, dtype=np.intp)
|
|
651
|
+
return empty, empty
|
|
652
|
+
|
|
653
|
+
src = []
|
|
654
|
+
dst = []
|
|
655
|
+
append_src = src.append
|
|
656
|
+
append_dst = dst.append
|
|
657
|
+
for node in nodes:
|
|
658
|
+
i = idx.get(node)
|
|
659
|
+
if i is None:
|
|
660
|
+
continue
|
|
661
|
+
for neighbor in G.neighbors(node):
|
|
662
|
+
j = idx.get(neighbor)
|
|
663
|
+
if j is None:
|
|
664
|
+
continue
|
|
665
|
+
append_src(i)
|
|
666
|
+
append_dst(j)
|
|
667
|
+
if not src:
|
|
668
|
+
empty = np.empty(0, dtype=np.intp)
|
|
669
|
+
return empty, empty
|
|
670
|
+
edge_src = np.asarray(src, dtype=np.intp)
|
|
671
|
+
edge_dst = np.asarray(dst, dtype=np.intp)
|
|
672
|
+
return edge_src, edge_dst
|
|
673
|
+
|
|
674
|
+
|
|
675
|
+
def _refresh_dnfr_vectors(
|
|
676
|
+
G: TNFRGraph, nodes: Sequence[NodeId], cache: DnfrCache
|
|
677
|
+
) -> None:
|
|
185
678
|
"""Update cached angle and state vectors for ΔNFR."""
|
|
186
|
-
|
|
187
|
-
trig = compute_theta_trig(((n, G.nodes[n]) for n in nodes), np=
|
|
188
|
-
use_numpy =
|
|
189
|
-
for
|
|
190
|
-
|
|
191
|
-
|
|
679
|
+
np_module = get_numpy()
|
|
680
|
+
trig = compute_theta_trig(((n, G.nodes[n]) for n in nodes), np=np_module)
|
|
681
|
+
use_numpy = _should_vectorize(G, np_module)
|
|
682
|
+
for index, node in enumerate(nodes):
|
|
683
|
+
i: int = int(index)
|
|
684
|
+
node_id: NodeId = node
|
|
685
|
+
nd = G.nodes[node_id]
|
|
686
|
+
cache.theta[i] = trig.theta[node_id]
|
|
192
687
|
cache.epi[i] = get_attr(nd, ALIAS_EPI, 0.0)
|
|
193
688
|
cache.vf[i] = get_attr(nd, ALIAS_VF, 0.0)
|
|
194
|
-
cache.cos_theta[i] = trig.cos[
|
|
195
|
-
cache.sin_theta[i] = trig.sin[
|
|
196
|
-
if use_numpy
|
|
197
|
-
_ensure_numpy_vectors(cache,
|
|
689
|
+
cache.cos_theta[i] = trig.cos[node_id]
|
|
690
|
+
cache.sin_theta[i] = trig.sin[node_id]
|
|
691
|
+
if use_numpy:
|
|
692
|
+
_ensure_numpy_vectors(cache, np_module)
|
|
198
693
|
else:
|
|
199
694
|
cache.theta_np = None
|
|
200
695
|
cache.epi_np = None
|
|
@@ -203,99 +698,236 @@ def _refresh_dnfr_vectors(G, nodes, cache: DnfrCache):
|
|
|
203
698
|
cache.sin_theta_np = None
|
|
204
699
|
|
|
205
700
|
|
|
206
|
-
def _prepare_dnfr_data(G, *, cache_size: int | None = 128) -> dict:
|
|
207
|
-
"""Precompute common data for ΔNFR strategies.
|
|
208
|
-
|
|
701
|
+
def _prepare_dnfr_data(G: TNFRGraph, *, cache_size: int | None = 128) -> dict[str, Any]:
|
|
702
|
+
"""Precompute common data for ΔNFR strategies.
|
|
703
|
+
|
|
704
|
+
The helper decides between edge-wise and dense adjacency accumulation
|
|
705
|
+
heuristically. Graphs whose edge density exceeds
|
|
706
|
+
``_SPARSE_DENSITY_THRESHOLD`` receive a cached adjacency matrix so the
|
|
707
|
+
dense path can be exercised; callers may also force the dense mode by
|
|
708
|
+
setting ``G.graph['dnfr_force_dense']`` to a truthy value.
|
|
709
|
+
"""
|
|
710
|
+
graph = G.graph
|
|
711
|
+
weights = graph.get("_dnfr_weights")
|
|
209
712
|
if weights is None:
|
|
210
713
|
weights = _configure_dnfr_weights(G)
|
|
211
714
|
|
|
212
|
-
|
|
213
|
-
|
|
715
|
+
result: dict[str, Any] = {
|
|
716
|
+
"weights": weights,
|
|
717
|
+
"cache_size": cache_size,
|
|
718
|
+
}
|
|
719
|
+
|
|
720
|
+
np_module = get_numpy()
|
|
721
|
+
use_numpy = _should_vectorize(G, np_module)
|
|
214
722
|
|
|
215
|
-
nodes
|
|
216
|
-
|
|
723
|
+
nodes = cast(tuple[NodeId, ...], cached_node_list(G))
|
|
724
|
+
edge_count = G.number_of_edges()
|
|
725
|
+
prefer_sparse = False
|
|
726
|
+
dense_override = bool(G.graph.get("dnfr_force_dense"))
|
|
727
|
+
if use_numpy:
|
|
728
|
+
prefer_sparse = _prefer_sparse_accumulation(len(nodes), edge_count)
|
|
729
|
+
if dense_override:
|
|
730
|
+
prefer_sparse = False
|
|
731
|
+
nodes_cached, A_untyped = cached_nodes_and_A(
|
|
732
|
+
G,
|
|
733
|
+
cache_size=cache_size,
|
|
734
|
+
require_numpy=False,
|
|
735
|
+
prefer_sparse=prefer_sparse,
|
|
736
|
+
nodes=nodes,
|
|
737
|
+
)
|
|
738
|
+
nodes = cast(tuple[NodeId, ...], nodes_cached)
|
|
739
|
+
A: np.ndarray | None = A_untyped
|
|
740
|
+
result["nodes"] = nodes
|
|
741
|
+
result["A"] = A
|
|
742
|
+
manager = _graph_cache_manager(G.graph)
|
|
217
743
|
checksum = G.graph.get("_dnfr_nodes_checksum")
|
|
218
|
-
|
|
219
|
-
|
|
220
|
-
|
|
744
|
+
dirty_flag = bool(G.graph.pop("_dnfr_prep_dirty", False))
|
|
745
|
+
existing_cache = cast(DnfrCache | None, graph.get("_dnfr_prep_cache"))
|
|
746
|
+
cache, idx, theta, epi, vf, cos_theta, sin_theta, refreshed = _init_dnfr_cache(
|
|
747
|
+
G,
|
|
748
|
+
nodes,
|
|
749
|
+
existing_cache,
|
|
750
|
+
checksum,
|
|
751
|
+
force_refresh=dirty_flag,
|
|
752
|
+
manager=manager,
|
|
221
753
|
)
|
|
754
|
+
dirty = dirty_flag or refreshed
|
|
755
|
+
result["cache"] = cache
|
|
756
|
+
result["idx"] = idx
|
|
757
|
+
result["theta"] = theta
|
|
758
|
+
result["epi"] = epi
|
|
759
|
+
result["vf"] = vf
|
|
760
|
+
result["cos_theta"] = cos_theta
|
|
761
|
+
result["sin_theta"] = sin_theta
|
|
222
762
|
if cache is not None:
|
|
223
763
|
_refresh_dnfr_vectors(G, nodes, cache)
|
|
764
|
+
if np_module is None:
|
|
765
|
+
for attr in (
|
|
766
|
+
"neighbor_x_np",
|
|
767
|
+
"neighbor_y_np",
|
|
768
|
+
"neighbor_epi_sum_np",
|
|
769
|
+
"neighbor_vf_sum_np",
|
|
770
|
+
"neighbor_count_np",
|
|
771
|
+
"neighbor_deg_sum_np",
|
|
772
|
+
"neighbor_inv_count_np",
|
|
773
|
+
"neighbor_cos_avg_np",
|
|
774
|
+
"neighbor_sin_avg_np",
|
|
775
|
+
"neighbor_mean_tmp_np",
|
|
776
|
+
"neighbor_mean_length_np",
|
|
777
|
+
"neighbor_accum_np",
|
|
778
|
+
"neighbor_edge_values_np",
|
|
779
|
+
):
|
|
780
|
+
setattr(cache, attr, None)
|
|
781
|
+
cache.neighbor_accum_signature = None
|
|
782
|
+
for attr in (
|
|
783
|
+
"th_bar_np",
|
|
784
|
+
"epi_bar_np",
|
|
785
|
+
"vf_bar_np",
|
|
786
|
+
"deg_bar_np",
|
|
787
|
+
"grad_phase_np",
|
|
788
|
+
"grad_epi_np",
|
|
789
|
+
"grad_vf_np",
|
|
790
|
+
"grad_topo_np",
|
|
791
|
+
"grad_total_np",
|
|
792
|
+
):
|
|
793
|
+
setattr(cache, attr, None)
|
|
224
794
|
|
|
225
795
|
w_phase = float(weights.get("phase", 0.0))
|
|
226
796
|
w_epi = float(weights.get("epi", 0.0))
|
|
227
797
|
w_vf = float(weights.get("vf", 0.0))
|
|
228
798
|
w_topo = float(weights.get("topo", 0.0))
|
|
229
|
-
|
|
230
|
-
|
|
231
|
-
|
|
232
|
-
|
|
233
|
-
|
|
234
|
-
|
|
799
|
+
result["w_phase"] = w_phase
|
|
800
|
+
result["w_epi"] = w_epi
|
|
801
|
+
result["w_vf"] = w_vf
|
|
802
|
+
result["w_topo"] = w_topo
|
|
803
|
+
degree_map = cast(dict[NodeId, float] | None, cache.degs if cache else None)
|
|
804
|
+
if cache is not None and dirty:
|
|
805
|
+
cache.degs = None
|
|
806
|
+
cache.deg_list = None
|
|
807
|
+
cache.deg_array = None
|
|
808
|
+
cache.edge_src = None
|
|
809
|
+
cache.edge_dst = None
|
|
810
|
+
cache.edge_signature = None
|
|
811
|
+
cache.neighbor_accum_signature = None
|
|
812
|
+
cache.neighbor_accum_np = None
|
|
813
|
+
cache.neighbor_edge_values_np = None
|
|
814
|
+
degree_map = None
|
|
815
|
+
if degree_map is None or len(degree_map) != len(G):
|
|
816
|
+
degree_map = {cast(NodeId, node): float(deg) for node, deg in G.degree()}
|
|
235
817
|
if cache is not None:
|
|
236
|
-
cache.degs =
|
|
818
|
+
cache.degs = degree_map
|
|
237
819
|
|
|
238
820
|
G.graph["_dnfr_prep_dirty"] = False
|
|
239
821
|
|
|
240
|
-
|
|
241
|
-
|
|
242
|
-
|
|
243
|
-
|
|
822
|
+
if (
|
|
823
|
+
cache is not None
|
|
824
|
+
and cache.deg_list is not None
|
|
825
|
+
and not dirty
|
|
826
|
+
and len(cache.deg_list) == len(nodes)
|
|
827
|
+
):
|
|
244
828
|
deg_list = cache.deg_list
|
|
245
829
|
else:
|
|
246
|
-
|
|
830
|
+
deg_list = [float(degree_map.get(node, 0.0)) for node in nodes]
|
|
831
|
+
if cache is not None:
|
|
832
|
+
cache.deg_list = deg_list
|
|
833
|
+
|
|
834
|
+
if w_topo != 0.0:
|
|
835
|
+
degs: dict[NodeId, float] | None = degree_map
|
|
836
|
+
else:
|
|
837
|
+
degs = None
|
|
838
|
+
result["degs"] = degs
|
|
839
|
+
result["deg_list"] = deg_list
|
|
840
|
+
|
|
841
|
+
deg_array: np.ndarray | None = None
|
|
842
|
+
if np_module is not None and deg_list is not None:
|
|
843
|
+
if cache is not None:
|
|
844
|
+
deg_array = _ensure_numpy_degrees(cache, deg_list, np_module)
|
|
845
|
+
else:
|
|
846
|
+
deg_array = np_module.array(deg_list, dtype=float)
|
|
847
|
+
elif cache is not None:
|
|
848
|
+
cache.deg_array = None
|
|
247
849
|
|
|
248
|
-
|
|
850
|
+
theta_np: np.ndarray | None
|
|
851
|
+
epi_np: np.ndarray | None
|
|
852
|
+
vf_np: np.ndarray | None
|
|
853
|
+
cos_theta_np: np.ndarray | None
|
|
854
|
+
sin_theta_np: np.ndarray | None
|
|
855
|
+
edge_src: np.ndarray | None
|
|
856
|
+
edge_dst: np.ndarray | None
|
|
857
|
+
if use_numpy:
|
|
249
858
|
theta_np, epi_np, vf_np, cos_theta_np, sin_theta_np = _ensure_numpy_vectors(
|
|
250
|
-
cache,
|
|
859
|
+
cache, np_module
|
|
251
860
|
)
|
|
252
|
-
|
|
861
|
+
edge_src = None
|
|
862
|
+
edge_dst = None
|
|
863
|
+
if cache is not None:
|
|
864
|
+
edge_src = cache.edge_src
|
|
865
|
+
edge_dst = cache.edge_dst
|
|
866
|
+
if edge_src is None or edge_dst is None or dirty:
|
|
867
|
+
edge_src, edge_dst = _build_edge_index_arrays(
|
|
868
|
+
G, nodes, idx, np_module
|
|
869
|
+
)
|
|
870
|
+
cache.edge_src = edge_src
|
|
871
|
+
cache.edge_dst = edge_dst
|
|
872
|
+
else:
|
|
873
|
+
edge_src, edge_dst = _build_edge_index_arrays(G, nodes, idx, np_module)
|
|
874
|
+
|
|
875
|
+
if cache is not None:
|
|
876
|
+
for attr in ("neighbor_accum_np", "neighbor_edge_values_np"):
|
|
877
|
+
arr = getattr(cache, attr, None)
|
|
878
|
+
if arr is not None:
|
|
879
|
+
result[attr] = arr
|
|
880
|
+
if edge_src is not None and edge_dst is not None:
|
|
881
|
+
signature = (id(edge_src), id(edge_dst), len(nodes))
|
|
882
|
+
result["edge_signature"] = signature
|
|
883
|
+
if cache is not None:
|
|
884
|
+
cache.edge_signature = signature
|
|
253
885
|
else:
|
|
254
886
|
theta_np = None
|
|
255
887
|
epi_np = None
|
|
256
888
|
vf_np = None
|
|
257
889
|
cos_theta_np = None
|
|
258
890
|
sin_theta_np = None
|
|
259
|
-
|
|
260
|
-
|
|
261
|
-
|
|
262
|
-
|
|
263
|
-
|
|
264
|
-
|
|
265
|
-
|
|
266
|
-
|
|
267
|
-
"
|
|
268
|
-
|
|
269
|
-
|
|
270
|
-
|
|
271
|
-
|
|
272
|
-
|
|
273
|
-
|
|
274
|
-
|
|
275
|
-
|
|
276
|
-
|
|
277
|
-
|
|
278
|
-
|
|
279
|
-
|
|
280
|
-
|
|
281
|
-
|
|
282
|
-
|
|
283
|
-
|
|
284
|
-
"cache_size": cache_size,
|
|
285
|
-
"cache": cache,
|
|
286
|
-
}
|
|
891
|
+
edge_src = None
|
|
892
|
+
edge_dst = None
|
|
893
|
+
if cache is not None:
|
|
894
|
+
cache.edge_src = None
|
|
895
|
+
cache.edge_dst = None
|
|
896
|
+
|
|
897
|
+
result.setdefault("neighbor_edge_values_np", None)
|
|
898
|
+
if cache is not None and "edge_signature" not in result:
|
|
899
|
+
result["edge_signature"] = cache.edge_signature
|
|
900
|
+
|
|
901
|
+
result["theta_np"] = theta_np
|
|
902
|
+
result["epi_np"] = epi_np
|
|
903
|
+
result["vf_np"] = vf_np
|
|
904
|
+
result["cos_theta_np"] = cos_theta_np
|
|
905
|
+
result["sin_theta_np"] = sin_theta_np
|
|
906
|
+
result["deg_array"] = deg_array
|
|
907
|
+
result["edge_src"] = edge_src
|
|
908
|
+
result["edge_dst"] = edge_dst
|
|
909
|
+
result["edge_count"] = edge_count
|
|
910
|
+
result["prefer_sparse"] = prefer_sparse
|
|
911
|
+
result["dense_override"] = dense_override
|
|
912
|
+
result.setdefault("neighbor_accum_np", None)
|
|
913
|
+
result.setdefault("neighbor_accum_signature", None)
|
|
914
|
+
|
|
915
|
+
return result
|
|
287
916
|
|
|
288
917
|
|
|
289
918
|
def _apply_dnfr_gradients(
|
|
290
|
-
G,
|
|
291
|
-
data,
|
|
292
|
-
th_bar,
|
|
293
|
-
epi_bar,
|
|
294
|
-
vf_bar,
|
|
295
|
-
deg_bar=None,
|
|
296
|
-
degs=None,
|
|
297
|
-
|
|
919
|
+
G: TNFRGraph,
|
|
920
|
+
data: MutableMapping[str, Any],
|
|
921
|
+
th_bar: Sequence[float] | np.ndarray,
|
|
922
|
+
epi_bar: Sequence[float] | np.ndarray,
|
|
923
|
+
vf_bar: Sequence[float] | np.ndarray,
|
|
924
|
+
deg_bar: Sequence[float] | np.ndarray | None = None,
|
|
925
|
+
degs: Mapping[Any, float] | Sequence[float] | np.ndarray | None = None,
|
|
926
|
+
*,
|
|
927
|
+
n_jobs: int | None = None,
|
|
928
|
+
) -> None:
|
|
298
929
|
"""Combine precomputed gradients and write ΔNFR to each node."""
|
|
930
|
+
np = get_numpy()
|
|
299
931
|
nodes = data["nodes"]
|
|
300
932
|
theta = data["theta"]
|
|
301
933
|
epi = data["epi"]
|
|
@@ -307,25 +939,146 @@ def _apply_dnfr_gradients(
|
|
|
307
939
|
if degs is None:
|
|
308
940
|
degs = data.get("degs")
|
|
309
941
|
|
|
310
|
-
|
|
311
|
-
|
|
312
|
-
|
|
313
|
-
|
|
314
|
-
|
|
315
|
-
|
|
316
|
-
|
|
317
|
-
|
|
318
|
-
|
|
319
|
-
|
|
320
|
-
|
|
321
|
-
|
|
322
|
-
|
|
323
|
-
|
|
942
|
+
cache: DnfrCache | None = data.get("cache")
|
|
943
|
+
|
|
944
|
+
theta_np = data.get("theta_np")
|
|
945
|
+
epi_np = data.get("epi_np")
|
|
946
|
+
vf_np = data.get("vf_np")
|
|
947
|
+
deg_array = data.get("deg_array") if w_topo != 0.0 else None
|
|
948
|
+
|
|
949
|
+
use_vector = (
|
|
950
|
+
np is not None
|
|
951
|
+
and theta_np is not None
|
|
952
|
+
and epi_np is not None
|
|
953
|
+
and vf_np is not None
|
|
954
|
+
and isinstance(th_bar, np.ndarray)
|
|
955
|
+
and isinstance(epi_bar, np.ndarray)
|
|
956
|
+
and isinstance(vf_bar, np.ndarray)
|
|
957
|
+
)
|
|
958
|
+
if use_vector and w_topo != 0.0:
|
|
959
|
+
use_vector = (
|
|
960
|
+
deg_bar is not None
|
|
961
|
+
and isinstance(deg_bar, np.ndarray)
|
|
962
|
+
and isinstance(deg_array, np.ndarray)
|
|
324
963
|
)
|
|
325
|
-
set_dnfr(G, n, float(dnfr))
|
|
326
964
|
|
|
965
|
+
if use_vector:
|
|
966
|
+
grad_phase = _ensure_cached_array(cache, "grad_phase_np", theta_np.shape, np)
|
|
967
|
+
grad_epi = _ensure_cached_array(cache, "grad_epi_np", epi_np.shape, np)
|
|
968
|
+
grad_vf = _ensure_cached_array(cache, "grad_vf_np", vf_np.shape, np)
|
|
969
|
+
grad_total = _ensure_cached_array(cache, "grad_total_np", theta_np.shape, np)
|
|
970
|
+
grad_topo = None
|
|
971
|
+
if w_topo != 0.0:
|
|
972
|
+
grad_topo = _ensure_cached_array(
|
|
973
|
+
cache, "grad_topo_np", deg_array.shape, np
|
|
974
|
+
)
|
|
327
975
|
|
|
328
|
-
|
|
976
|
+
np.copyto(grad_phase, theta_np, casting="unsafe")
|
|
977
|
+
grad_phase -= th_bar
|
|
978
|
+
grad_phase += math.pi
|
|
979
|
+
np.mod(grad_phase, math.tau, out=grad_phase)
|
|
980
|
+
grad_phase -= math.pi
|
|
981
|
+
grad_phase *= -1.0 / math.pi
|
|
982
|
+
|
|
983
|
+
np.copyto(grad_epi, epi_bar, casting="unsafe")
|
|
984
|
+
grad_epi -= epi_np
|
|
985
|
+
|
|
986
|
+
np.copyto(grad_vf, vf_bar, casting="unsafe")
|
|
987
|
+
grad_vf -= vf_np
|
|
988
|
+
|
|
989
|
+
if grad_topo is not None and deg_bar is not None:
|
|
990
|
+
np.copyto(grad_topo, deg_bar, casting="unsafe")
|
|
991
|
+
grad_topo -= deg_array
|
|
992
|
+
|
|
993
|
+
if w_phase != 0.0:
|
|
994
|
+
np.multiply(grad_phase, w_phase, out=grad_total)
|
|
995
|
+
else:
|
|
996
|
+
grad_total.fill(0.0)
|
|
997
|
+
if w_epi != 0.0:
|
|
998
|
+
if w_epi != 1.0:
|
|
999
|
+
np.multiply(grad_epi, w_epi, out=grad_epi)
|
|
1000
|
+
np.add(grad_total, grad_epi, out=grad_total)
|
|
1001
|
+
if w_vf != 0.0:
|
|
1002
|
+
if w_vf != 1.0:
|
|
1003
|
+
np.multiply(grad_vf, w_vf, out=grad_vf)
|
|
1004
|
+
np.add(grad_total, grad_vf, out=grad_total)
|
|
1005
|
+
if w_topo != 0.0 and grad_topo is not None:
|
|
1006
|
+
if w_topo != 1.0:
|
|
1007
|
+
np.multiply(grad_topo, w_topo, out=grad_topo)
|
|
1008
|
+
np.add(grad_total, grad_topo, out=grad_total)
|
|
1009
|
+
|
|
1010
|
+
dnfr_values = grad_total
|
|
1011
|
+
else:
|
|
1012
|
+
effective_jobs = _resolve_parallel_jobs(n_jobs, len(nodes))
|
|
1013
|
+
if effective_jobs:
|
|
1014
|
+
chunk_results = []
|
|
1015
|
+
with ProcessPoolExecutor(max_workers=effective_jobs) as executor:
|
|
1016
|
+
futures = []
|
|
1017
|
+
for start, end in _iter_chunk_offsets(len(nodes), effective_jobs):
|
|
1018
|
+
if start == end:
|
|
1019
|
+
continue
|
|
1020
|
+
futures.append(
|
|
1021
|
+
executor.submit(
|
|
1022
|
+
_dnfr_gradients_worker,
|
|
1023
|
+
start,
|
|
1024
|
+
end,
|
|
1025
|
+
nodes,
|
|
1026
|
+
theta,
|
|
1027
|
+
epi,
|
|
1028
|
+
vf,
|
|
1029
|
+
th_bar,
|
|
1030
|
+
epi_bar,
|
|
1031
|
+
vf_bar,
|
|
1032
|
+
deg_bar,
|
|
1033
|
+
degs,
|
|
1034
|
+
w_phase,
|
|
1035
|
+
w_epi,
|
|
1036
|
+
w_vf,
|
|
1037
|
+
w_topo,
|
|
1038
|
+
)
|
|
1039
|
+
)
|
|
1040
|
+
for future in futures:
|
|
1041
|
+
chunk_results.append(future.result())
|
|
1042
|
+
|
|
1043
|
+
dnfr_values = [0.0] * len(nodes)
|
|
1044
|
+
for start, chunk in sorted(chunk_results, key=lambda item: item[0]):
|
|
1045
|
+
end = start + len(chunk)
|
|
1046
|
+
dnfr_values[start:end] = chunk
|
|
1047
|
+
else:
|
|
1048
|
+
dnfr_values = []
|
|
1049
|
+
for i, n in enumerate(nodes):
|
|
1050
|
+
g_phase = -angle_diff(theta[i], th_bar[i]) / math.pi
|
|
1051
|
+
g_epi = epi_bar[i] - epi[i]
|
|
1052
|
+
g_vf = vf_bar[i] - vf[i]
|
|
1053
|
+
if w_topo != 0.0 and deg_bar is not None and degs is not None:
|
|
1054
|
+
if isinstance(degs, dict):
|
|
1055
|
+
deg_i = float(degs.get(n, 0))
|
|
1056
|
+
else:
|
|
1057
|
+
deg_i = float(degs[i])
|
|
1058
|
+
g_topo = deg_bar[i] - deg_i
|
|
1059
|
+
else:
|
|
1060
|
+
g_topo = 0.0
|
|
1061
|
+
dnfr_values.append(
|
|
1062
|
+
w_phase * g_phase + w_epi * g_epi + w_vf * g_vf + w_topo * g_topo
|
|
1063
|
+
)
|
|
1064
|
+
|
|
1065
|
+
if cache is not None:
|
|
1066
|
+
cache.grad_phase_np = None
|
|
1067
|
+
cache.grad_epi_np = None
|
|
1068
|
+
cache.grad_vf_np = None
|
|
1069
|
+
cache.grad_topo_np = None
|
|
1070
|
+
cache.grad_total_np = None
|
|
1071
|
+
|
|
1072
|
+
for i, n in enumerate(nodes):
|
|
1073
|
+
set_dnfr(G, n, float(dnfr_values[i]))
|
|
1074
|
+
|
|
1075
|
+
|
|
1076
|
+
def _init_bar_arrays(
|
|
1077
|
+
data: MutableMapping[str, Any],
|
|
1078
|
+
*,
|
|
1079
|
+
degs: Mapping[Any, float] | Sequence[float] | None = None,
|
|
1080
|
+
np: ModuleType | None = None,
|
|
1081
|
+
) -> tuple[Sequence[float], Sequence[float], Sequence[float], Sequence[float] | None]:
|
|
329
1082
|
"""Prepare containers for neighbour means.
|
|
330
1083
|
|
|
331
1084
|
If ``np`` is provided, NumPy arrays are created; otherwise lists are used.
|
|
@@ -333,97 +1086,221 @@ def _init_bar_arrays(data, *, degs=None, np=None):
|
|
|
333
1086
|
active.
|
|
334
1087
|
"""
|
|
335
1088
|
|
|
1089
|
+
nodes = data["nodes"]
|
|
336
1090
|
theta = data["theta"]
|
|
337
1091
|
epi = data["epi"]
|
|
338
1092
|
vf = data["vf"]
|
|
339
1093
|
w_topo = data["w_topo"]
|
|
1094
|
+
cache: DnfrCache | None = data.get("cache")
|
|
340
1095
|
if np is None:
|
|
341
1096
|
np = get_numpy()
|
|
342
1097
|
if np is not None:
|
|
343
|
-
|
|
344
|
-
|
|
345
|
-
|
|
346
|
-
|
|
347
|
-
|
|
348
|
-
|
|
349
|
-
|
|
350
|
-
|
|
1098
|
+
size = len(theta)
|
|
1099
|
+
if cache is not None:
|
|
1100
|
+
th_bar = cache.th_bar_np
|
|
1101
|
+
if th_bar is None or getattr(th_bar, "shape", None) != (size,):
|
|
1102
|
+
th_bar = np.array(theta, dtype=float)
|
|
1103
|
+
else:
|
|
1104
|
+
np.copyto(th_bar, theta, casting="unsafe")
|
|
1105
|
+
cache.th_bar_np = th_bar
|
|
1106
|
+
|
|
1107
|
+
epi_bar = cache.epi_bar_np
|
|
1108
|
+
if epi_bar is None or getattr(epi_bar, "shape", None) != (size,):
|
|
1109
|
+
epi_bar = np.array(epi, dtype=float)
|
|
1110
|
+
else:
|
|
1111
|
+
np.copyto(epi_bar, epi, casting="unsafe")
|
|
1112
|
+
cache.epi_bar_np = epi_bar
|
|
1113
|
+
|
|
1114
|
+
vf_bar = cache.vf_bar_np
|
|
1115
|
+
if vf_bar is None or getattr(vf_bar, "shape", None) != (size,):
|
|
1116
|
+
vf_bar = np.array(vf, dtype=float)
|
|
1117
|
+
else:
|
|
1118
|
+
np.copyto(vf_bar, vf, casting="unsafe")
|
|
1119
|
+
cache.vf_bar_np = vf_bar
|
|
1120
|
+
|
|
1121
|
+
if w_topo != 0.0 and degs is not None:
|
|
1122
|
+
if isinstance(degs, dict):
|
|
1123
|
+
deg_size = len(nodes)
|
|
1124
|
+
else:
|
|
1125
|
+
deg_size = len(degs)
|
|
1126
|
+
deg_bar = cache.deg_bar_np
|
|
1127
|
+
if (
|
|
1128
|
+
deg_bar is None
|
|
1129
|
+
or getattr(deg_bar, "shape", None) != (deg_size,)
|
|
1130
|
+
):
|
|
1131
|
+
if isinstance(degs, dict):
|
|
1132
|
+
deg_bar = np.array(
|
|
1133
|
+
[float(degs.get(node, 0.0)) for node in nodes],
|
|
1134
|
+
dtype=float,
|
|
1135
|
+
)
|
|
1136
|
+
else:
|
|
1137
|
+
deg_bar = np.array(degs, dtype=float)
|
|
1138
|
+
else:
|
|
1139
|
+
if isinstance(degs, dict):
|
|
1140
|
+
for i, node in enumerate(nodes):
|
|
1141
|
+
deg_bar[i] = float(degs.get(node, 0.0))
|
|
1142
|
+
else:
|
|
1143
|
+
np.copyto(deg_bar, degs, casting="unsafe")
|
|
1144
|
+
cache.deg_bar_np = deg_bar
|
|
1145
|
+
else:
|
|
1146
|
+
deg_bar = None
|
|
1147
|
+
if cache is not None:
|
|
1148
|
+
cache.deg_bar_np = None
|
|
1149
|
+
else:
|
|
1150
|
+
th_bar = np.array(theta, dtype=float)
|
|
1151
|
+
epi_bar = np.array(epi, dtype=float)
|
|
1152
|
+
vf_bar = np.array(vf, dtype=float)
|
|
1153
|
+
deg_bar = (
|
|
1154
|
+
np.array(degs, dtype=float)
|
|
1155
|
+
if w_topo != 0.0 and degs is not None
|
|
1156
|
+
else None
|
|
1157
|
+
)
|
|
351
1158
|
else:
|
|
352
|
-
|
|
353
|
-
|
|
354
|
-
|
|
355
|
-
|
|
1159
|
+
size = len(theta)
|
|
1160
|
+
if cache is not None:
|
|
1161
|
+
th_bar = cache.th_bar
|
|
1162
|
+
if th_bar is None or len(th_bar) != size:
|
|
1163
|
+
th_bar = [0.0] * size
|
|
1164
|
+
th_bar[:] = theta
|
|
1165
|
+
cache.th_bar = th_bar
|
|
1166
|
+
|
|
1167
|
+
epi_bar = cache.epi_bar
|
|
1168
|
+
if epi_bar is None or len(epi_bar) != size:
|
|
1169
|
+
epi_bar = [0.0] * size
|
|
1170
|
+
epi_bar[:] = epi
|
|
1171
|
+
cache.epi_bar = epi_bar
|
|
1172
|
+
|
|
1173
|
+
vf_bar = cache.vf_bar
|
|
1174
|
+
if vf_bar is None or len(vf_bar) != size:
|
|
1175
|
+
vf_bar = [0.0] * size
|
|
1176
|
+
vf_bar[:] = vf
|
|
1177
|
+
cache.vf_bar = vf_bar
|
|
1178
|
+
|
|
1179
|
+
if w_topo != 0.0 and degs is not None:
|
|
1180
|
+
if isinstance(degs, dict):
|
|
1181
|
+
deg_size = len(nodes)
|
|
1182
|
+
else:
|
|
1183
|
+
deg_size = len(degs)
|
|
1184
|
+
deg_bar = cache.deg_bar
|
|
1185
|
+
if deg_bar is None or len(deg_bar) != deg_size:
|
|
1186
|
+
deg_bar = [0.0] * deg_size
|
|
1187
|
+
if isinstance(degs, dict):
|
|
1188
|
+
for i, node in enumerate(nodes):
|
|
1189
|
+
deg_bar[i] = float(degs.get(node, 0.0))
|
|
1190
|
+
else:
|
|
1191
|
+
for i, value in enumerate(degs):
|
|
1192
|
+
deg_bar[i] = float(value)
|
|
1193
|
+
cache.deg_bar = deg_bar
|
|
1194
|
+
else:
|
|
1195
|
+
deg_bar = None
|
|
1196
|
+
cache.deg_bar = None
|
|
1197
|
+
else:
|
|
1198
|
+
th_bar = list(theta)
|
|
1199
|
+
epi_bar = list(epi)
|
|
1200
|
+
vf_bar = list(vf)
|
|
1201
|
+
deg_bar = (
|
|
1202
|
+
list(degs) if w_topo != 0.0 and degs is not None else None
|
|
1203
|
+
)
|
|
356
1204
|
return th_bar, epi_bar, vf_bar, deg_bar
|
|
357
1205
|
|
|
358
1206
|
|
|
359
1207
|
def _compute_neighbor_means(
|
|
360
|
-
G,
|
|
361
|
-
data,
|
|
1208
|
+
G: TNFRGraph,
|
|
1209
|
+
data: MutableMapping[str, Any],
|
|
362
1210
|
*,
|
|
363
|
-
x,
|
|
364
|
-
y,
|
|
365
|
-
epi_sum,
|
|
366
|
-
vf_sum,
|
|
367
|
-
count,
|
|
368
|
-
deg_sum=None,
|
|
369
|
-
degs=None,
|
|
370
|
-
np=None,
|
|
371
|
-
):
|
|
1211
|
+
x: Sequence[float],
|
|
1212
|
+
y: Sequence[float],
|
|
1213
|
+
epi_sum: Sequence[float],
|
|
1214
|
+
vf_sum: Sequence[float],
|
|
1215
|
+
count: Sequence[float] | np.ndarray,
|
|
1216
|
+
deg_sum: Sequence[float] | None = None,
|
|
1217
|
+
degs: Mapping[Any, float] | Sequence[float] | None = None,
|
|
1218
|
+
np: ModuleType | None = None,
|
|
1219
|
+
) -> tuple[Sequence[float], Sequence[float], Sequence[float], Sequence[float] | None]:
|
|
372
1220
|
"""Return neighbour mean arrays for ΔNFR."""
|
|
373
1221
|
w_topo = data["w_topo"]
|
|
374
1222
|
theta = data["theta"]
|
|
1223
|
+
cache: DnfrCache | None = data.get("cache")
|
|
375
1224
|
is_numpy = np is not None and isinstance(count, np.ndarray)
|
|
376
1225
|
th_bar, epi_bar, vf_bar, deg_bar = _init_bar_arrays(
|
|
377
1226
|
data, degs=degs, np=np if is_numpy else None
|
|
378
1227
|
)
|
|
379
1228
|
|
|
380
1229
|
if is_numpy:
|
|
1230
|
+
n = count.shape[0]
|
|
381
1231
|
mask = count > 0
|
|
382
|
-
if np.any(mask):
|
|
383
|
-
th_bar
|
|
384
|
-
|
|
385
|
-
|
|
386
|
-
|
|
387
|
-
|
|
388
|
-
|
|
389
|
-
|
|
1232
|
+
if not np.any(mask):
|
|
1233
|
+
return th_bar, epi_bar, vf_bar, deg_bar
|
|
1234
|
+
|
|
1235
|
+
inv = _ensure_cached_array(cache, "neighbor_inv_count_np", (n,), np)
|
|
1236
|
+
inv.fill(0.0)
|
|
1237
|
+
np.divide(1.0, count, out=inv, where=mask)
|
|
1238
|
+
|
|
1239
|
+
cos_avg = _ensure_cached_array(cache, "neighbor_cos_avg_np", (n,), np)
|
|
1240
|
+
cos_avg.fill(0.0)
|
|
1241
|
+
np.multiply(x, inv, out=cos_avg, where=mask)
|
|
1242
|
+
|
|
1243
|
+
sin_avg = _ensure_cached_array(cache, "neighbor_sin_avg_np", (n,), np)
|
|
1244
|
+
sin_avg.fill(0.0)
|
|
1245
|
+
np.multiply(y, inv, out=sin_avg, where=mask)
|
|
1246
|
+
|
|
1247
|
+
lengths = _ensure_cached_array(cache, "neighbor_mean_length_np", (n,), np)
|
|
1248
|
+
np.hypot(cos_avg, sin_avg, out=lengths)
|
|
1249
|
+
|
|
1250
|
+
temp = _ensure_cached_array(cache, "neighbor_mean_tmp_np", (n,), np)
|
|
1251
|
+
np.arctan2(sin_avg, cos_avg, out=temp)
|
|
1252
|
+
|
|
1253
|
+
theta_src = data.get("theta_np")
|
|
1254
|
+
if theta_src is None:
|
|
1255
|
+
theta_src = np.asarray(theta, dtype=float)
|
|
1256
|
+
zero_mask = lengths <= _MEAN_VECTOR_EPS
|
|
1257
|
+
np.copyto(temp, theta_src, where=zero_mask)
|
|
1258
|
+
np.copyto(th_bar, temp, where=mask, casting="unsafe")
|
|
1259
|
+
|
|
1260
|
+
np.divide(epi_sum, count, out=epi_bar, where=mask)
|
|
1261
|
+
np.divide(vf_sum, count, out=vf_bar, where=mask)
|
|
1262
|
+
if w_topo != 0.0 and deg_bar is not None and deg_sum is not None:
|
|
1263
|
+
np.divide(deg_sum, count, out=deg_bar, where=mask)
|
|
390
1264
|
return th_bar, epi_bar, vf_bar, deg_bar
|
|
391
1265
|
|
|
392
1266
|
n = len(theta)
|
|
393
|
-
cos_th = data["cos_theta"]
|
|
394
|
-
sin_th = data["sin_theta"]
|
|
395
|
-
idx = data["idx"]
|
|
396
|
-
nodes = data["nodes"]
|
|
397
|
-
deg_list = data.get("deg_list")
|
|
398
1267
|
for i in range(n):
|
|
399
1268
|
c = count[i]
|
|
400
|
-
if c:
|
|
401
|
-
|
|
402
|
-
|
|
403
|
-
|
|
404
|
-
|
|
405
|
-
|
|
406
|
-
|
|
407
|
-
|
|
408
|
-
|
|
409
|
-
|
|
1269
|
+
if not c:
|
|
1270
|
+
continue
|
|
1271
|
+
inv = 1.0 / float(c)
|
|
1272
|
+
cos_avg = x[i] * inv
|
|
1273
|
+
sin_avg = y[i] * inv
|
|
1274
|
+
if math.hypot(cos_avg, sin_avg) <= _MEAN_VECTOR_EPS:
|
|
1275
|
+
th_bar[i] = theta[i]
|
|
1276
|
+
else:
|
|
1277
|
+
th_bar[i] = math.atan2(sin_avg, cos_avg)
|
|
1278
|
+
epi_bar[i] = epi_sum[i] * inv
|
|
1279
|
+
vf_bar[i] = vf_sum[i] * inv
|
|
1280
|
+
if w_topo != 0.0 and deg_bar is not None and deg_sum is not None:
|
|
1281
|
+
deg_bar[i] = deg_sum[i] * inv
|
|
410
1282
|
return th_bar, epi_bar, vf_bar, deg_bar
|
|
411
1283
|
|
|
412
1284
|
|
|
413
1285
|
def _compute_dnfr_common(
|
|
414
|
-
G,
|
|
415
|
-
data,
|
|
1286
|
+
G: TNFRGraph,
|
|
1287
|
+
data: MutableMapping[str, Any],
|
|
416
1288
|
*,
|
|
417
|
-
x,
|
|
418
|
-
y,
|
|
419
|
-
epi_sum,
|
|
420
|
-
vf_sum,
|
|
421
|
-
count,
|
|
422
|
-
deg_sum=None,
|
|
423
|
-
degs=None,
|
|
424
|
-
|
|
1289
|
+
x: Sequence[float],
|
|
1290
|
+
y: Sequence[float],
|
|
1291
|
+
epi_sum: Sequence[float],
|
|
1292
|
+
vf_sum: Sequence[float],
|
|
1293
|
+
count: Sequence[float] | None,
|
|
1294
|
+
deg_sum: Sequence[float] | None = None,
|
|
1295
|
+
degs: Sequence[float] | None = None,
|
|
1296
|
+
n_jobs: int | None = None,
|
|
1297
|
+
) -> None:
|
|
425
1298
|
"""Compute neighbour means and apply ΔNFR gradients."""
|
|
426
|
-
|
|
1299
|
+
np_module = get_numpy()
|
|
1300
|
+
if np_module is not None and isinstance(count, getattr(np_module, "ndarray", tuple)):
|
|
1301
|
+
np_arg = np_module
|
|
1302
|
+
else:
|
|
1303
|
+
np_arg = None
|
|
427
1304
|
th_bar, epi_bar, vf_bar, deg_bar = _compute_neighbor_means(
|
|
428
1305
|
G,
|
|
429
1306
|
data,
|
|
@@ -434,121 +1311,571 @@ def _compute_dnfr_common(
         count=count,
         deg_sum=deg_sum,
         degs=degs,
-        np=
+        np=np_arg,
+    )
+    _apply_dnfr_gradients(
+        G,
+        data,
+        th_bar,
+        epi_bar,
+        vf_bar,
+        deg_bar,
+        degs,
+        n_jobs=n_jobs,
     )
-    _apply_dnfr_gradients(G, data, th_bar, epi_bar, vf_bar, deg_bar, degs)


-def
+def _reset_numpy_buffer(
+    buffer: np.ndarray | None,
+    size: int,
+    np: ModuleType,
+) -> np.ndarray:
+    if buffer is None or getattr(buffer, "shape", None) is None or buffer.shape[0] != size:
+        return np.zeros(size, dtype=float)
+    buffer.fill(0.0)
+    return buffer
+
+
+def _init_neighbor_sums(
+    data: MutableMapping[str, Any],
+    *,
+    np: ModuleType | None = None,
+) -> NeighborStats:
     """Initialise containers for neighbour sums."""
     nodes = data["nodes"]
     n = len(nodes)
     w_topo = data["w_topo"]
+    cache: DnfrCache | None = data.get("cache")
+
+    def _reset_list(buffer: list[float] | None, value: float = 0.0) -> list[float]:
+        if buffer is None or len(buffer) != n:
+            return [value] * n
+        for i in range(n):
+            buffer[i] = value
+        return buffer
+
     if np is not None:
-
-
-
-
-
-
+        if cache is not None:
+            x = cache.neighbor_x_np
+            y = cache.neighbor_y_np
+            epi_sum = cache.neighbor_epi_sum_np
+            vf_sum = cache.neighbor_vf_sum_np
+            count = cache.neighbor_count_np
+            x = _reset_numpy_buffer(x, n, np)
+            y = _reset_numpy_buffer(y, n, np)
+            epi_sum = _reset_numpy_buffer(epi_sum, n, np)
+            vf_sum = _reset_numpy_buffer(vf_sum, n, np)
+            count = _reset_numpy_buffer(count, n, np)
+            cache.neighbor_x_np = x
+            cache.neighbor_y_np = y
+            cache.neighbor_epi_sum_np = epi_sum
+            cache.neighbor_vf_sum_np = vf_sum
+            cache.neighbor_count_np = count
+            cache.neighbor_x = _reset_list(cache.neighbor_x)
+            cache.neighbor_y = _reset_list(cache.neighbor_y)
+            cache.neighbor_epi_sum = _reset_list(cache.neighbor_epi_sum)
+            cache.neighbor_vf_sum = _reset_list(cache.neighbor_vf_sum)
+            cache.neighbor_count = _reset_list(cache.neighbor_count)
+            if w_topo != 0.0:
+                deg_sum = _reset_numpy_buffer(cache.neighbor_deg_sum_np, n, np)
+                cache.neighbor_deg_sum_np = deg_sum
+                cache.neighbor_deg_sum = _reset_list(cache.neighbor_deg_sum)
+            else:
+                cache.neighbor_deg_sum_np = None
+                cache.neighbor_deg_sum = None
+                deg_sum = None
+        else:
+            x = np.zeros(n, dtype=float)
+            y = np.zeros(n, dtype=float)
+            epi_sum = np.zeros(n, dtype=float)
+            vf_sum = np.zeros(n, dtype=float)
+            count = np.zeros(n, dtype=float)
+            deg_sum = np.zeros(n, dtype=float) if w_topo != 0.0 else None
         degs = None
     else:
-
-
-
-
-
+        if cache is not None:
+            x = _reset_list(cache.neighbor_x)
+            y = _reset_list(cache.neighbor_y)
+            epi_sum = _reset_list(cache.neighbor_epi_sum)
+            vf_sum = _reset_list(cache.neighbor_vf_sum)
+            count = _reset_list(cache.neighbor_count)
+            cache.neighbor_x = x
+            cache.neighbor_y = y
+            cache.neighbor_epi_sum = epi_sum
+            cache.neighbor_vf_sum = vf_sum
+            cache.neighbor_count = count
+            if w_topo != 0.0:
+                deg_sum = _reset_list(cache.neighbor_deg_sum)
+                cache.neighbor_deg_sum = deg_sum
+            else:
+                cache.neighbor_deg_sum = None
+                deg_sum = None
+        else:
+            x = [0.0] * n
+            y = [0.0] * n
+            epi_sum = [0.0] * n
+            vf_sum = [0.0] * n
+            count = [0.0] * n
+            deg_sum = [0.0] * n if w_topo != 0.0 else None
         deg_list = data.get("deg_list")
-        if w_topo != 0 and deg_list is not None:
-
-            degs = list(deg_list)
+        if w_topo != 0.0 and deg_list is not None:
+            degs = deg_list
         else:
-            deg_sum = None
             degs = None
     return x, y, epi_sum, vf_sum, count, deg_sum, degs


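`_init_neighbor_sums` deliberately reuses its per-node accumulators between steps: a cached buffer is kept only while its length still matches the node count and is zeroed in place, otherwise a fresh one is allocated. A small sketch of that reset-or-allocate pattern (illustrative helper assuming NumPy; not the cache class itself):

    import numpy as np

    def reset_or_allocate(buffer, size):
        # Reuse the cached array when the shape still matches; otherwise allocate.
        if buffer is None or buffer.shape[0] != size:
            return np.zeros(size, dtype=float)
        buffer.fill(0.0)
        return buffer

    buf = None
    buf = reset_or_allocate(buf, 5)    # allocated once
    same = reset_or_allocate(buf, 5)   # reused and zeroed in place
    print(same is buf)                 # True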
-def
-
+def _prefer_sparse_accumulation(n: int, edge_count: int | None) -> bool:
+    """Return ``True`` when neighbour sums should use edge accumulation."""
+
+    if n <= 1 or not edge_count:
+        return False
+    possible_edges = n * (n - 1)
+    if possible_edges <= 0:
+        return False
+    density = edge_count / possible_edges
+    return density <= _SPARSE_DENSITY_THRESHOLD
+
+
+def _accumulate_neighbors_dense(
+    G: TNFRGraph,
+    data: MutableMapping[str, Any],
+    *,
+    x: np.ndarray,
+    y: np.ndarray,
+    epi_sum: np.ndarray,
+    vf_sum: np.ndarray,
+    count: np.ndarray,
+    deg_sum: np.ndarray | None,
+    np: ModuleType,
+) -> NeighborStats:
+    """Vectorised neighbour accumulation using a dense adjacency matrix."""
+
     nodes = data["nodes"]
-
-
-
-
-
+    if not nodes:
+        return x, y, epi_sum, vf_sum, count, deg_sum, None
+
+    A = data.get("A")
+    if A is None:
+        return _accumulate_neighbors_numpy(
+            G,
+            data,
+            x=x,
+            y=y,
+            epi_sum=epi_sum,
+            vf_sum=vf_sum,
+            count=count,
+            deg_sum=deg_sum,
+            np=np,
+        )
+
+    cache: DnfrCache | None = data.get("cache")
+    n = len(nodes)
+
+    state = _ensure_numpy_state_vectors(data, np)
+    vectors = [state["cos"], state["sin"], state["epi"], state["vf"]]
+
+    components = _ensure_cached_array(cache, "dense_components_np", (n, 4), np)
+    for col, src_vec in enumerate(vectors):
+        np.copyto(components[:, col], src_vec, casting="unsafe")
+
+    accum = _ensure_cached_array(cache, "dense_accum_np", (n, 4), np)
+    np.matmul(A, components, out=accum)
+
+    np.copyto(x, accum[:, 0], casting="unsafe")
+    np.copyto(y, accum[:, 1], casting="unsafe")
+    np.copyto(epi_sum, accum[:, 2], casting="unsafe")
+    np.copyto(vf_sum, accum[:, 3], casting="unsafe")
+
+    degree_counts = data.get("dense_degree_np")
+    if degree_counts is None or getattr(degree_counts, "shape", (0,))[0] != n:
+        degree_counts = None
+    if degree_counts is None and cache is not None:
+        cached_counts = cache.dense_degree_np
+        if cached_counts is not None and getattr(cached_counts, "shape", (0,))[0] == n:
+            degree_counts = cached_counts
+    if degree_counts is None:
+        degree_counts = A.sum(axis=1)
+        if cache is not None:
+            cache.dense_degree_np = degree_counts
+    data["dense_degree_np"] = degree_counts
+    np.copyto(count, degree_counts, casting="unsafe")
+
+    degs = None
+    if deg_sum is not None:
+        deg_array = data.get("deg_array")
+        if deg_array is None:
+            deg_array = _resolve_numpy_degree_array(
+                data,
+                count,
+                cache=cache,
+                np=np,
             )
+        if deg_array is None:
+            deg_sum.fill(0.0)
+        else:
+            np.matmul(A, deg_array, out=deg_sum)
+            degs = deg_array
+
+    return x, y, epi_sum, vf_sum, count, deg_sum, degs
+
+
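When the density heuristic above allows it, the dense path stacks the per-node state (cos θ, sin θ, EPI, νf) column-wise and produces every neighbour sum with one matrix product against the adjacency matrix, with the row sums doubling as neighbour counts. A hedged sketch of the idea on a random toy adjacency matrix (all names here are illustrative):

    import numpy as np

    rng = np.random.default_rng(0)
    n = 6
    A = (rng.random((n, n)) < 0.5).astype(float)
    np.fill_diagonal(A, 0.0)  # no self-loops

    theta = rng.uniform(-np.pi, np.pi, n)
    epi = rng.random(n)
    vf = rng.random(n)

    # Stack the components column-wise and accumulate all of them in one matmul.
    components = np.column_stack((np.cos(theta), np.sin(theta), epi, vf))
    sums = A @ components       # sums[i, k] = sum of component k over neighbours of i
    counts = A.sum(axis=1)      # row sums double as the dense neighbour counts
    print(sums.shape, counts.shape)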
+def _accumulate_neighbors_broadcasted(
+    *,
+    edge_src: np.ndarray,
+    edge_dst: np.ndarray,
+    cos: np.ndarray,
+    sin: np.ndarray,
+    epi: np.ndarray,
+    vf: np.ndarray,
+    x: np.ndarray,
+    y: np.ndarray,
+    epi_sum: np.ndarray,
+    vf_sum: np.ndarray,
+    count: np.ndarray | None,
+    deg_sum: np.ndarray | None,
+    deg_array: np.ndarray | None,
+    cache: DnfrCache | None,
+    np: ModuleType,
+) -> dict[str, np.ndarray]:
+    """Accumulate neighbour contributions using direct indexed reductions."""
+
+    n = x.shape[0]
+    edge_count = int(edge_src.size)
+
+    include_count = count is not None
+    use_topology = deg_sum is not None and deg_array is not None
+
+    component_rows = 4 + (1 if include_count else 0) + (1 if use_topology else 0)
+
+    if cache is not None:
+        base_signature = (id(edge_src), id(edge_dst), n, edge_count)
+        cache.edge_signature = base_signature
+        signature = (base_signature, component_rows)
+        previous_signature = cache.neighbor_accum_signature
+
+        accum = cache.neighbor_accum_np
+        if (
+            accum is None
+            or getattr(accum, "shape", None) != (component_rows, n)
+            or previous_signature != signature
+        ):
+            accum = np.zeros((component_rows, n), dtype=float)
+            cache.neighbor_accum_np = accum
+        else:
+            accum.fill(0.0)
+
+        edge_values = cache.neighbor_edge_values_np
+        if (
+            edge_values is None
+            or getattr(edge_values, "shape", None) != (edge_count,)
+        ):
+            edge_values = np.empty((edge_count,), dtype=float)
+            cache.neighbor_edge_values_np = edge_values
+
+        cache.neighbor_accum_signature = signature
+    else:
+        accum = np.zeros((component_rows, n), dtype=float)
+        edge_values = (
+            np.empty((edge_count,), dtype=float)
+            if edge_count
+            else np.empty((0,), dtype=float)
+        )
+
+    if edge_count:
+        row = 0
+
+        np.take(cos, edge_dst, out=edge_values)
+        np.add.at(accum[row], edge_src, edge_values)
+        row += 1
+
+        np.take(sin, edge_dst, out=edge_values)
+        np.add.at(accum[row], edge_src, edge_values)
+        row += 1
+
+        np.take(epi, edge_dst, out=edge_values)
+        np.add.at(accum[row], edge_src, edge_values)
+        row += 1
+
+        np.take(vf, edge_dst, out=edge_values)
+        np.add.at(accum[row], edge_src, edge_values)
+        row += 1
+
+        if include_count and count is not None:
+            edge_values.fill(1.0)
+            np.add.at(accum[row], edge_src, edge_values)
+            row += 1
+
+        if use_topology and deg_sum is not None and deg_array is not None:
+            np.take(deg_array, edge_dst, out=edge_values)
+            np.add.at(accum[row], edge_src, edge_values)
+    else:
+        accum.fill(0.0)
+
+    row = 0
+    np.copyto(x, accum[row], casting="unsafe")
+    row += 1
+    np.copyto(y, accum[row], casting="unsafe")
+    row += 1
+    np.copyto(epi_sum, accum[row], casting="unsafe")
+    row += 1
+    np.copyto(vf_sum, accum[row], casting="unsafe")
+    row += 1
+
+    if include_count and count is not None:
+        np.copyto(count, accum[row], casting="unsafe")
+        row += 1
+
+    if use_topology and deg_sum is not None:
+        np.copyto(deg_sum, accum[row], casting="unsafe")
+
+    return {
+        "accumulator": accum,
+        "edge_values": edge_values,
+    }
+
+
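The broadcasted path above never materialises the adjacency matrix: it gathers neighbour values along a flat edge list with `np.take` and scatter-adds them into per-node rows with `np.add.at`, which accumulates correctly even when a source index repeats. A self-contained toy example (names are illustrative):

    import numpy as np

    # Directed edge list: edge k adds values[edge_dst[k]] into node edge_src[k].
    edge_src = np.array([0, 0, 1, 2, 2, 2])
    edge_dst = np.array([1, 2, 0, 0, 1, 3])
    values = np.array([10.0, 20.0, 30.0, 40.0])   # one value per node

    per_node = np.zeros(4)
    edge_vals = np.take(values, edge_dst)          # gather neighbour values per edge
    np.add.at(per_node, edge_src, edge_vals)       # scatter-add handles repeated sources
    print(per_node)                                # [50. 10. 70.  0.]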
+def _build_neighbor_sums_common(
+    G: TNFRGraph,
+    data: MutableMapping[str, Any],
+    *,
+    use_numpy: bool,
+    n_jobs: int | None = None,
+) -> NeighborStats:
+    """Build neighbour accumulators honouring cached NumPy buffers when possible."""
+
+    nodes = data["nodes"]
+    cache: DnfrCache | None = data.get("cache")
+    np_module = get_numpy()
+    has_numpy_buffers = _has_cached_numpy_buffers(data, cache)
+    if use_numpy and np_module is None and has_numpy_buffers:
+        np_module = sys.modules.get("numpy")
+
+    if np_module is not None:
         if not nodes:
-            return
+            return _init_neighbor_sums(data, np=np_module)
+
         x, y, epi_sum, vf_sum, count, deg_sum, degs = _init_neighbor_sums(
-            data, np=
+            data, np=np_module
         )
+        prefer_sparse = data.get("prefer_sparse")
+        if prefer_sparse is None:
+            prefer_sparse = _prefer_sparse_accumulation(
+                len(nodes), data.get("edge_count")
+            )
+            data["prefer_sparse"] = prefer_sparse
+
+        use_dense = False
         A = data.get("A")
-        if A is None:
-
-
-
-
-
-
-
-        if
-
-
-
-
-
-            data
-
-
-
-
-
-
-
-            y[:] = A @ sin_th
-            epi_sum[:] = A @ epi
-            vf_sum[:] = A @ vf
-            count[:] = A.sum(axis=1)
-            if w_topo != 0.0:
-                deg_array = data.get("deg_array")
-                if deg_array is None:
-                    deg_list = data.get("deg_list")
-                    if deg_list is not None:
-                        deg_array = np.array(deg_list, dtype=float)
-                        data["deg_array"] = deg_array
-                        if cache is not None:
-                            cache.deg_array = deg_array
-                    else:
-                        deg_array = count
-                deg_sum[:] = A @ deg_array
-                degs = deg_array
-            return x, y, epi_sum, vf_sum, count, deg_sum, degs
-        else:
-            x, y, epi_sum, vf_sum, count, deg_sum, degs_list = _init_neighbor_sums(
-                data
+        if use_numpy and not prefer_sparse and A is not None:
+            shape = getattr(A, "shape", (0, 0))
+            use_dense = shape[0] == len(nodes) and shape[1] == len(nodes)
+        if use_numpy and data.get("dense_override") and A is not None:
+            shape = getattr(A, "shape", (0, 0))
+            if shape[0] == len(nodes) and shape[1] == len(nodes):
+                use_dense = True
+
+        if use_dense:
+            accumulator = _accumulate_neighbors_dense
+        else:
+            _ensure_numpy_state_vectors(data, np_module)
+            accumulator = _accumulate_neighbors_numpy
+        return accumulator(
+            G,
+            data,
+            x=x,
+            y=y,
+            epi_sum=epi_sum,
+            vf_sum=vf_sum,
+            count=count,
+            deg_sum=deg_sum,
+            np=np_module,
         )
-
-
-
-
-
-
-
-
+
+    if not nodes:
+        return _init_neighbor_sums(data)
+
+    x, y, epi_sum, vf_sum, count, deg_sum, degs_list = _init_neighbor_sums(data)
+    idx = data["idx"]
+    epi = data["epi"]
+    vf = data["vf"]
+    cos_th = data["cos_theta"]
+    sin_th = data["sin_theta"]
+    deg_list = data.get("deg_list")
+
+    effective_jobs = _resolve_parallel_jobs(n_jobs, len(nodes))
+    if effective_jobs:
+        neighbor_indices: list[list[int]] = []
+        for node in nodes:
+            indices: list[int] = []
             for v in G.neighbors(node):
-
-
-
-
-
-
-
-
+                indices.append(idx[v])
+            neighbor_indices.append(indices)
+
+        chunk_results = []
+        with ProcessPoolExecutor(max_workers=effective_jobs) as executor:
+            futures = []
+            for start, end in _iter_chunk_offsets(len(nodes), effective_jobs):
+                if start == end:
+                    continue
+                futures.append(
+                    executor.submit(
+                        _neighbor_sums_worker,
+                        start,
+                        end,
+                        neighbor_indices,
+                        cos_th,
+                        sin_th,
+                        epi,
+                        vf,
+                        x[start:end],
+                        y[start:end],
+                        epi_sum[start:end],
+                        vf_sum[start:end],
+                        count[start:end],
+                        deg_sum[start:end] if deg_sum is not None else None,
+                        deg_list,
+                        degs_list,
+                    )
+                )
+            for future in futures:
+                chunk_results.append(future.result())
+
+        for start, chunk_x, chunk_y, chunk_epi, chunk_vf, chunk_count, chunk_deg in sorted(
+            chunk_results, key=lambda item: item[0]
+        ):
+            end = start + len(chunk_x)
+            x[start:end] = chunk_x
+            y[start:end] = chunk_y
+            epi_sum[start:end] = chunk_epi
+            vf_sum[start:end] = chunk_vf
+            count[start:end] = chunk_count
+            if deg_sum is not None and chunk_deg is not None:
+                deg_sum[start:end] = chunk_deg
         return x, y, epi_sum, vf_sum, count, deg_sum, degs_list

+    for i, node in enumerate(nodes):
+        deg_i = degs_list[i] if degs_list is not None else 0.0
+        x_i = x[i]
+        y_i = y[i]
+        epi_i = epi_sum[i]
+        vf_i = vf_sum[i]
+        count_i = count[i]
+        deg_acc = deg_sum[i] if deg_sum is not None else 0.0
+        for v in G.neighbors(node):
+            j = idx[v]
+            cos_j = cos_th[j]
+            sin_j = sin_th[j]
+            epi_j = epi[j]
+            vf_j = vf[j]
+            x_i += cos_j
+            y_i += sin_j
+            epi_i += epi_j
+            vf_i += vf_j
+            count_i += 1
+            if deg_sum is not None:
+                deg_acc += deg_list[j] if deg_list is not None else deg_i
+        x[i] = x_i
+        y[i] = y_i
+        epi_sum[i] = epi_i
+        vf_sum[i] = vf_i
+        count[i] = count_i
+        if deg_sum is not None:
+            deg_sum[i] = deg_acc
+    return x, y, epi_sum, vf_sum, count, deg_sum, degs_list
+
+
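Without NumPy, `_build_neighbor_sums_common` can still parallelise: it splits the node range into contiguous chunks, submits each chunk to a process pool, and merges the partial results back in order. A minimal sketch of that chunk/submit/merge pattern (the worker below is a stand-in, not the module's `_neighbor_sums_worker`):

    from concurrent.futures import ProcessPoolExecutor

    def chunk_offsets(n, parts):
        # Split range(n) into `parts` contiguous, near-equal chunks.
        step, rem = divmod(n, parts)
        start = 0
        for i in range(parts):
            end = start + step + (1 if i < rem else 0)
            yield start, end
            start = end

    def square_chunk(start, end):
        return start, [i * i for i in range(start, end)]

    if __name__ == "__main__":
        results = []
        with ProcessPoolExecutor(max_workers=2) as executor:
            futures = [
                executor.submit(square_chunk, s, e)
                for s, e in chunk_offsets(10, 2)
                if s != e
            ]
            for future in futures:
                results.append(future.result())
        merged = [v for _, chunk in sorted(results) for v in chunk]
        print(merged)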
+def _accumulate_neighbors_numpy(
+    G: TNFRGraph,
+    data: MutableMapping[str, Any],
+    *,
+    x: np.ndarray,
+    y: np.ndarray,
+    epi_sum: np.ndarray,
+    vf_sum: np.ndarray,
+    count: np.ndarray | None,
+    deg_sum: np.ndarray | None,
+    np: ModuleType,
+) -> NeighborStats:
+    """Vectorised neighbour accumulation reusing cached NumPy buffers."""
+
+    nodes = data["nodes"]
+    if not nodes:
+        return x, y, epi_sum, vf_sum, count, deg_sum, None
+
+    cache: DnfrCache | None = data.get("cache")
+
+    state = _ensure_numpy_state_vectors(data, np)
+    cos_th = state["cos"]
+    sin_th = state["sin"]
+    epi = state["epi"]
+    vf = state["vf"]
+
+    edge_src = data.get("edge_src")
+    edge_dst = data.get("edge_dst")
+    if edge_src is None or edge_dst is None:
+        edge_src, edge_dst = _build_edge_index_arrays(G, nodes, data["idx"], np)
+        data["edge_src"] = edge_src
+        data["edge_dst"] = edge_dst
+        if cache is not None:
+            cache.edge_src = edge_src
+            cache.edge_dst = edge_dst
+    if edge_src is not None:
+        data["edge_count"] = int(edge_src.size)
+
+    cached_deg_array = data.get("deg_array")
+    reuse_count_from_deg = False
+    if count is not None:
+        if cached_deg_array is not None:
+            np.copyto(count, cached_deg_array, casting="unsafe")
+            reuse_count_from_deg = True
+        else:
+            count.fill(0.0)
+
+    deg_array = None
+    if deg_sum is not None:
+        deg_sum.fill(0.0)
+        deg_array = _resolve_numpy_degree_array(
+            data, count if count is not None else None, cache=cache, np=np
+        )
+    elif cached_deg_array is not None:
+        deg_array = cached_deg_array
+
+    accum = _accumulate_neighbors_broadcasted(
+        edge_src=edge_src,
+        edge_dst=edge_dst,
+        cos=cos_th,
+        sin=sin_th,
+        epi=epi,
+        vf=vf,
+        x=x,
+        y=y,
+        epi_sum=epi_sum,
+        vf_sum=vf_sum,
+        count=count,
+        deg_sum=deg_sum,
+        deg_array=deg_array,
+        cache=cache,
+        np=np,
+    )
+
+    data["neighbor_accum_np"] = accum.get("accumulator")
+    data["neighbor_edge_values_np"] = accum.get("edge_values")
+    if cache is not None:
+        data["neighbor_accum_signature"] = cache.neighbor_accum_signature
+    if reuse_count_from_deg and count is not None and cached_deg_array is not None:
+        np.copyto(count, cached_deg_array, casting="unsafe")
+    degs = deg_array if deg_sum is not None and deg_array is not None else None
+    return x, y, epi_sum, vf_sum, count, deg_sum, degs
+

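The accumulator above builds flat `edge_src`/`edge_dst` index arrays once and stores them in the shared mapping (and cache) so later steps skip the Python-level neighbour traversal. A hedged sketch of deriving such arrays from a plain adjacency dict (not the TNFR graph type):

    import numpy as np

    adjacency = {"a": ["b", "c"], "b": ["a"], "c": ["a", "b"]}
    idx = {node: i for i, node in enumerate(adjacency)}

    src, dst = [], []
    for node, neighbours in adjacency.items():
        for neigh in neighbours:
            src.append(idx[node])
            dst.append(idx[neigh])

    edge_src = np.asarray(src, dtype=np.intp)
    edge_dst = np.asarray(dst, dtype=np.intp)
    print(edge_src, edge_dst)  # reusable until the edge set changes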
-def _compute_dnfr(
+def _compute_dnfr(
+    G: TNFRGraph,
+    data: MutableMapping[str, Any],
+    *,
+    use_numpy: bool | None = None,
+    n_jobs: int | None = None,
+) -> None:
     """Compute ΔNFR using neighbour sums.

     Parameters
@@ -557,11 +1884,38 @@ def _compute_dnfr(G, data, *, use_numpy: bool = False) -> None:
         Graph on which the computation is performed.
     data : dict
         Precomputed ΔNFR data as returned by :func:`_prepare_dnfr_data`.
-    use_numpy : bool, optional
-        When ``True`` the
-
+    use_numpy : bool | None, optional
+        Backwards compatibility flag. When ``True`` the function eagerly
+        prepares NumPy buffers (if available). When ``False`` the engine still
+        prefers the vectorised path whenever :func:`get_numpy` returns a module
+        and the graph does not set ``vectorized_dnfr`` to ``False``.
     """
-
+    np_module = get_numpy()
+    data["dnfr_numpy_available"] = bool(np_module)
+    vector_disabled = G.graph.get("vectorized_dnfr") is False
+    prefer_dense = np_module is not None and not vector_disabled
+    if use_numpy is True and np_module is not None:
+        prefer_dense = True
+    if use_numpy is False or vector_disabled:
+        prefer_dense = False
+    data["dnfr_used_numpy"] = bool(prefer_dense and np_module is not None)
+
+    data["n_jobs"] = n_jobs
+    try:
+        res = _build_neighbor_sums_common(
+            G,
+            data,
+            use_numpy=prefer_dense,
+            n_jobs=n_jobs,
+        )
+    except TypeError as exc:
+        if "n_jobs" not in str(exc):
+            raise
+        res = _build_neighbor_sums_common(
+            G,
+            data,
+            use_numpy=prefer_dense,
+        )
     if res is None:
         return
     x, y, epi_sum, vf_sum, count, deg_sum, degs = res
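The try/except above is how `_compute_dnfr` stays compatible with helpers that predate the `n_jobs` keyword: call with the keyword first, retry without it only when the TypeError names that argument. The same negotiation in isolation (the callee here is hypothetical):

    def call_with_optional_n_jobs(func, *args, n_jobs=None, **kwargs):
        # Prefer the richer signature, degrade gracefully for older callables.
        try:
            return func(*args, n_jobs=n_jobs, **kwargs)
        except TypeError as exc:
            if "n_jobs" not in str(exc):
                raise
            return func(*args, **kwargs)

    def legacy(x):          # does not accept n_jobs
        return x * 2

    print(call_with_optional_n_jobs(legacy, 21))  # 42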
@@ -575,10 +1929,16 @@ def _compute_dnfr(G, data, *, use_numpy: bool = False) -> None:
         count=count,
         deg_sum=deg_sum,
         degs=degs,
+        n_jobs=n_jobs,
     )


-def default_compute_delta_nfr(
+def default_compute_delta_nfr(
+    G: TNFRGraph,
+    *,
+    cache_size: int | None = 1,
+    n_jobs: int | None = None,
+) -> None:
     """Compute ΔNFR by mixing phase, EPI, νf and a topological term.

     Parameters
@@ -589,6 +1949,10 @@ def default_compute_delta_nfr(G, *, cache_size: int | None = 1) -> None:
         Maximum number of edge configurations cached in ``G.graph``. Values
         ``None`` or <= 0 imply unlimited cache. Defaults to ``1`` to keep the
         previous behaviour.
+    n_jobs : int | None, optional
+        Parallel worker count for the pure-Python accumulation path. ``None``
+        or values <= 1 preserve the serial behaviour. The vectorised NumPy
+        branch ignores this parameter as it already operates in bulk.
     """
     data = _prepare_dnfr_data(G, cache_size=cache_size)
     _write_dnfr_metadata(
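The contract in that docstring — ``None`` or values <= 1 keep the serial path, larger values are capped by the amount of available work — can be expressed by a tiny resolver like the hypothetical one below; the package's actual `_resolve_parallel_jobs` may differ in detail:

    def resolve_parallel_jobs(n_jobs, n_items):
        # None / <=1 -> serial; otherwise never spawn more workers than items.
        if not n_jobs or n_jobs <= 1 or n_items <= 1:
            return None
        return min(int(n_jobs), n_items)

    print(resolve_parallel_jobs(None, 100))  # None -> serial
    print(resolve_parallel_jobs(8, 3))       # 3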
@@ -596,19 +1960,62 @@ def default_compute_delta_nfr(G, *, cache_size: int | None = 1) -> None:
         weights=data["weights"],
         hook_name="default_compute_delta_nfr",
     )
-
-
-
+    _compute_dnfr(G, data, n_jobs=n_jobs)
+    if not data.get("dnfr_numpy_available"):
+        cache = data.get("cache")
+        if isinstance(cache, DnfrCache):
+            for attr in (
+                "neighbor_x_np",
+                "neighbor_y_np",
+                "neighbor_epi_sum_np",
+                "neighbor_vf_sum_np",
+                "neighbor_count_np",
+                "neighbor_deg_sum_np",
+                "neighbor_inv_count_np",
+                "neighbor_cos_avg_np",
+                "neighbor_sin_avg_np",
+                "neighbor_mean_tmp_np",
+                "neighbor_mean_length_np",
+                "neighbor_accum_np",
+                "neighbor_edge_values_np",
+            ):
+                setattr(cache, attr, None)
+            cache.neighbor_accum_signature = None


 def set_delta_nfr_hook(
-    G
+    G: TNFRGraph,
+    func: DeltaNFRHook,
+    *,
+    name: str | None = None,
+    note: str | None = None,
 ) -> None:
     """Set a stable hook to compute ΔNFR.
-
-
+
+    The callable should accept ``(G, *[, n_jobs])`` and is responsible for
+    writing ``ALIAS_DNFR`` in each node. ``n_jobs`` is optional and ignored by
+    hooks that do not support parallel execution. Basic metadata in
+    ``G.graph`` is updated accordingly.
     """
-
+
+    def _wrapped(graph: TNFRGraph, *args: Any, **kwargs: Any) -> None:
+        if "n_jobs" in kwargs:
+            try:
+                func(graph, *args, **kwargs)
+                return
+            except TypeError as exc:
+                if "n_jobs" not in str(exc):
+                    raise
+                kwargs = dict(kwargs)
+                kwargs.pop("n_jobs", None)
+                func(graph, *args, **kwargs)
+                return
+        func(graph, *args, **kwargs)
+
+    _wrapped.__name__ = getattr(func, "__name__", "custom_dnfr")
+    _wrapped.__doc__ = getattr(func, "__doc__", _wrapped.__doc__)
+
+    G.graph["compute_delta_nfr"] = _wrapped
     G.graph["_dnfr_hook_name"] = str(
         name or getattr(func, "__name__", "custom_dnfr")
     )
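Registering a custom ΔNFR hook then amounts to handing `set_delta_nfr_hook` any callable that writes the per-node values; the wrapper above silently drops `n_jobs` for callables that cannot accept it. A hedged usage sketch (the hook body writes a made-up "dnfr" attribute rather than the package's real alias):

    import networkx as nx

    def my_delta_nfr(G, *, n_jobs=None):
        # Toy hook: write a constant into a hypothetical "dnfr" node attribute.
        for _, nd in G.nodes(data=True):
            nd["dnfr"] = 0.0

    G = nx.path_graph(4)
    my_delta_nfr(G, n_jobs=2)
    # With the real API this would be registered via:
    #     set_delta_nfr_hook(G, my_delta_nfr, name="my_delta_nfr")
    # after which G.graph["compute_delta_nfr"](G, n_jobs=...) dispatches through the wrapper.
    print(G.nodes[0]["dnfr"])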
@@ -620,114 +2027,289 @@ def set_delta_nfr_hook(
     G.graph["_DNFR_META"] = meta


+def _dnfr_hook_chunk_worker(
+    G: TNFRGraph,
+    node_ids: Sequence[NodeId],
+    grad_items: tuple[
+        tuple[str, Callable[[TNFRGraph, NodeId, Mapping[str, Any]], float]],
+        ...,
+    ],
+    weights: Mapping[str, float],
+) -> list[tuple[NodeId, float]]:
+    """Compute weighted gradients for ``node_ids``.
+
+    The helper is defined at module level so it can be pickled by
+    :class:`concurrent.futures.ProcessPoolExecutor`.
+    """
+
+    results: list[tuple[NodeId, float]] = []
+    for node in node_ids:
+        nd = G.nodes[node]
+        total = 0.0
+        for name, func in grad_items:
+            w = weights.get(name, 0.0)
+            if w:
+                total += w * float(func(G, node, nd))
+        results.append((node, total))
+    return results
+
+
 def _apply_dnfr_hook(
-    G,
-    grads:
+    G: TNFRGraph,
+    grads: Mapping[str, Callable[[TNFRGraph, NodeId, Mapping[str, Any]], float]],
     *,
-    weights:
+    weights: Mapping[str, float],
     hook_name: str,
     note: str | None = None,
+    n_jobs: int | None = None,
 ) -> None:
     """Generic helper to compute and store ΔNFR using ``grads``.

-
-
-
+    Parameters
+    ----------
+    G : nx.Graph
+        Graph whose nodes will receive the ΔNFR update.
+    grads : dict
+        Mapping from component names to callables with signature
+        ``(G, node, data) -> float`` returning the gradient contribution.
+    weights : dict
+        Weight per component; missing entries default to ``0``.
+    hook_name : str
+        Friendly identifier stored in ``G.graph`` metadata.
+    note : str | None, optional
+        Additional documentation recorded next to the hook metadata.
+    n_jobs : int | None, optional
+        Optional worker count for the pure-Python execution path. When NumPy
+        is available the helper always prefers the vectorised implementation
+        and ignores ``n_jobs`` because the computation already happens in
+        bulk.
     """

-
-
+    nodes_data: list[tuple[NodeId, Mapping[str, Any]]] = list(G.nodes(data=True))
+    if not nodes_data:
+        _write_dnfr_metadata(G, weights=weights, hook_name=hook_name, note=note)
+        return
+
+    np_module = cast(ModuleType | None, get_numpy())
+    if np_module is not None:
+        totals = np_module.zeros(len(nodes_data), dtype=float)
         for name, func in grads.items():
-            w = weights.get(name, 0.0)
-            if w:
-
-
+            w = float(weights.get(name, 0.0))
+            if w == 0.0:
+                continue
+            values = np_module.fromiter(
+                (float(func(G, n, nd)) for n, nd in nodes_data),
+                dtype=float,
+                count=len(nodes_data),
+            )
+            if w == 1.0:
+                np_module.add(totals, values, out=totals)
+            else:
+                np_module.add(totals, values * w, out=totals)
+        for idx, (n, _) in enumerate(nodes_data):
+            set_dnfr(G, n, float(totals[idx]))
+        _write_dnfr_metadata(G, weights=weights, hook_name=hook_name, note=note)
+        return
+
+    effective_jobs = _resolve_parallel_jobs(n_jobs, len(nodes_data))
+    results: list[tuple[NodeId, float]] | None = None
+    if effective_jobs:
+        grad_items = tuple(grads.items())
+        try:
+            import pickle
+
+            pickle.dumps((grad_items, weights, G), protocol=pickle.HIGHEST_PROTOCOL)
+        except Exception:
+            effective_jobs = None
+        else:
+            chunk_results: list[tuple[NodeId, float]] = []
+            with ProcessPoolExecutor(max_workers=effective_jobs) as executor:
+                futures = []
+                node_ids: list[NodeId] = [n for n, _ in nodes_data]
+                for start, end in _iter_chunk_offsets(len(node_ids), effective_jobs):
+                    if start == end:
+                        continue
+                    futures.append(
+                        executor.submit(
+                            _dnfr_hook_chunk_worker,
+                            G,
+                            node_ids[start:end],
+                            grad_items,
+                            weights,
+                        )
+                    )
+                for future in futures:
+                    chunk_results.extend(future.result())
+            results = chunk_results
+
+    if results is None:
+        results = []
+        for n, nd in nodes_data:
+            total = 0.0
+            for name, func in grads.items():
+                w = weights.get(name, 0.0)
+                if w:
+                    total += w * float(func(G, n, nd))
+            results.append((n, total))
+
+    for node, value in results:
+        set_dnfr(G, node, float(value))

     _write_dnfr_metadata(G, weights=weights, hook_name=hook_name, note=note)


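The vectorised branch of `_apply_dnfr_hook` materialises each gradient as an array with `np.fromiter` and folds it into a running total, skipping zero weights and avoiding an extra multiply when the weight is exactly one. A standalone sketch of that weighted mixing with toy gradient callables:

    import numpy as np

    items = [("n0", 1.0), ("n1", 2.0), ("n2", 3.0)]
    grads = {
        "double": lambda value: 2.0 * value,
        "negate": lambda value: -value,
    }
    weights = {"double": 0.5, "negate": 1.0}

    totals = np.zeros(len(items), dtype=float)
    for name, func in grads.items():
        w = float(weights.get(name, 0.0))
        if w == 0.0:
            continue
        values = np.fromiter((func(v) for _, v in items), dtype=float, count=len(items))
        totals += values if w == 1.0 else values * w
    print(totals)  # 0.5*(2v) + 1.0*(-v) = 0 for every entry -> [0. 0. 0.]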
 # --- Hooks de ejemplo (opcionales) ---
-def dnfr_phase_only(G) -> None:
-    """Example: ΔNFR from phase only (Kuramoto-like)."""

-
-
-
+
+class _PhaseGradient:
+    """Callable computing the phase contribution using cached trig values."""
+
+    __slots__ = ("cos", "sin")
+
+    def __init__(
+        self,
+        cos_map: Mapping[NodeId, float],
+        sin_map: Mapping[NodeId, float],
+    ) -> None:
+        self.cos: Mapping[NodeId, float] = cos_map
+        self.sin: Mapping[NodeId, float] = sin_map
+
+    def __call__(
+        self,
+        G: TNFRGraph,
+        n: NodeId,
+        nd: Mapping[str, Any],
+    ) -> float:
+        theta_val = get_theta_attr(nd, 0.0)
+        th_i = float(theta_val if theta_val is not None else 0.0)
+        neighbors = list(G.neighbors(n))
+        if neighbors:
+            th_bar = neighbor_phase_mean_list(
+                neighbors,
+                cos_th=self.cos,
+                sin_th=self.sin,
+                fallback=th_i,
+            )
+        else:
+            th_bar = th_i
         return -angle_diff(th_i, th_bar) / math.pi

+
+class _NeighborAverageGradient:
+    """Callable computing neighbour averages for scalar attributes."""
+
+    __slots__ = ("alias", "values")
+
+    def __init__(
+        self,
+        alias: tuple[str, ...],
+        values: MutableMapping[NodeId, float],
+    ) -> None:
+        self.alias: tuple[str, ...] = alias
+        self.values: MutableMapping[NodeId, float] = values
+
+    def __call__(
+        self,
+        G: TNFRGraph,
+        n: NodeId,
+        nd: Mapping[str, Any],
+    ) -> float:
+        val = self.values.get(n)
+        if val is None:
+            val = float(get_attr(nd, self.alias, 0.0))
+            self.values[n] = val
+        neighbors = list(G.neighbors(n))
+        if not neighbors:
+            return 0.0
+        total = 0.0
+        for neigh in neighbors:
+            neigh_val = self.values.get(neigh)
+            if neigh_val is None:
+                neigh_val = float(get_attr(G.nodes[neigh], self.alias, val))
+                self.values[neigh] = neigh_val
+            total += neigh_val
+        return total / len(neighbors) - val
+
+
+def dnfr_phase_only(G: TNFRGraph, *, n_jobs: int | None = None) -> None:
+    """Example: ΔNFR from phase only (Kuramoto-like).
+
+    Parameters
+    ----------
+    G : nx.Graph
+        Graph whose nodes receive the ΔNFR assignment.
+    n_jobs : int | None, optional
+        Parallel worker hint used when NumPy is unavailable. Defaults to
+        serial execution.
+    """
+
+    trig = compute_theta_trig(G.nodes(data=True))
+    g_phase = _PhaseGradient(trig.cos, trig.sin)
     _apply_dnfr_hook(
         G,
         {"phase": g_phase},
         weights={"phase": 1.0},
         hook_name="dnfr_phase_only",
-        note="
+        note="Example hook.",
+        n_jobs=n_jobs,
     )


|
-
def dnfr_epi_vf_mixed(G) -> None:
|
|
668
|
-
"""Example: ΔNFR without phase, mixing EPI and νf.
|
|
669
|
-
|
|
670
|
-
def g_epi(G, n, nd):
|
|
671
|
-
epi_i = get_attr(nd, ALIAS_EPI, 0.0)
|
|
672
|
-
neighbors = list(G.neighbors(n))
|
|
673
|
-
if neighbors:
|
|
674
|
-
total = 0.0
|
|
675
|
-
for v in neighbors:
|
|
676
|
-
total += float(get_attr(G.nodes[v], ALIAS_EPI, epi_i))
|
|
677
|
-
epi_bar = total / len(neighbors)
|
|
678
|
-
else:
|
|
679
|
-
epi_bar = float(epi_i)
|
|
680
|
-
return epi_bar - epi_i
|
|
2258
|
+
def dnfr_epi_vf_mixed(G: TNFRGraph, *, n_jobs: int | None = None) -> None:
|
|
2259
|
+
"""Example: ΔNFR without phase, mixing EPI and νf.
|
|
681
2260
|
|
|
682
|
-
|
|
683
|
-
|
|
684
|
-
|
|
685
|
-
|
|
686
|
-
|
|
687
|
-
|
|
688
|
-
|
|
689
|
-
|
|
690
|
-
else:
|
|
691
|
-
vf_bar = float(vf_i)
|
|
692
|
-
return vf_bar - vf_i
|
|
2261
|
+
Parameters
|
|
2262
|
+
----------
|
|
2263
|
+
G : nx.Graph
|
|
2264
|
+
Graph whose nodes receive the ΔNFR assignment.
|
|
2265
|
+
n_jobs : int | None, optional
|
|
2266
|
+
Parallel worker hint used when NumPy is unavailable. Defaults to
|
|
2267
|
+
serial execution.
|
|
2268
|
+
"""
|
|
693
2269
|
|
|
2270
|
+
epi_values = {n: float(get_attr(nd, ALIAS_EPI, 0.0)) for n, nd in G.nodes(data=True)}
|
|
2271
|
+
vf_values = {n: float(get_attr(nd, ALIAS_VF, 0.0)) for n, nd in G.nodes(data=True)}
|
|
2272
|
+
grads = {
|
|
2273
|
+
"epi": _NeighborAverageGradient(ALIAS_EPI, epi_values),
|
|
2274
|
+
"vf": _NeighborAverageGradient(ALIAS_VF, vf_values),
|
|
2275
|
+
}
|
|
694
2276
|
_apply_dnfr_hook(
|
|
695
2277
|
G,
|
|
696
|
-
|
|
2278
|
+
grads,
|
|
697
2279
|
weights={"phase": 0.0, "epi": 0.5, "vf": 0.5},
|
|
698
2280
|
hook_name="dnfr_epi_vf_mixed",
|
|
699
|
-
note="
|
|
2281
|
+
note="Example hook.",
|
|
2282
|
+
n_jobs=n_jobs,
|
|
700
2283
|
)
|
|
701
2284
|
|
|
702
2285
|
|
|
703
|
-
def dnfr_laplacian(G) -> None:
|
|
704
|
-
"""Explicit topological gradient using Laplacian over EPI and νf.
|
|
2286
|
+
def dnfr_laplacian(G: TNFRGraph, *, n_jobs: int | None = None) -> None:
|
|
2287
|
+
"""Explicit topological gradient using Laplacian over EPI and νf.
|
|
2288
|
+
|
|
2289
|
+
Parameters
|
|
2290
|
+
----------
|
|
2291
|
+
G : nx.Graph
|
|
2292
|
+
Graph whose nodes receive the ΔNFR assignment.
|
|
2293
|
+
n_jobs : int | None, optional
|
|
2294
|
+
Parallel worker hint used when NumPy is unavailable. Defaults to
|
|
2295
|
+
serial execution.
|
|
2296
|
+
"""
|
|
2297
|
+
|
|
705
2298
|
weights_cfg = get_param(G, "DNFR_WEIGHTS")
|
|
706
2299
|
wE = float(weights_cfg.get("epi", DEFAULTS["DNFR_WEIGHTS"]["epi"]))
|
|
707
2300
|
wV = float(weights_cfg.get("vf", DEFAULTS["DNFR_WEIGHTS"]["vf"]))
|
|
708
2301
|
|
|
709
|
-
|
|
710
|
-
|
|
711
|
-
|
|
712
|
-
|
|
713
|
-
|
|
714
|
-
|
|
715
|
-
)
|
|
716
|
-
return epi_bar - epi
|
|
717
|
-
|
|
718
|
-
def g_vf(G, n, nd):
|
|
719
|
-
vf = get_attr(nd, ALIAS_VF, 0.0)
|
|
720
|
-
neigh = list(G.neighbors(n))
|
|
721
|
-
deg = len(neigh) or 1
|
|
722
|
-
vf_bar = sum(get_attr(G.nodes[v], ALIAS_VF, vf) for v in neigh) / deg
|
|
723
|
-
return vf_bar - vf
|
|
724
|
-
|
|
2302
|
+
epi_values = {n: float(get_attr(nd, ALIAS_EPI, 0.0)) for n, nd in G.nodes(data=True)}
|
|
2303
|
+
vf_values = {n: float(get_attr(nd, ALIAS_VF, 0.0)) for n, nd in G.nodes(data=True)}
|
|
2304
|
+
grads = {
|
|
2305
|
+
"epi": _NeighborAverageGradient(ALIAS_EPI, epi_values),
|
|
2306
|
+
"vf": _NeighborAverageGradient(ALIAS_VF, vf_values),
|
|
2307
|
+
}
|
|
725
2308
|
_apply_dnfr_hook(
|
|
726
2309
|
G,
|
|
727
|
-
|
|
2310
|
+
grads,
|
|
728
2311
|
weights={"epi": wE, "vf": wV},
|
|
729
2312
|
hook_name="dnfr_laplacian",
|
|
730
|
-
note="
|
|
2313
|
+
note="Topological gradient",
|
|
2314
|
+
n_jobs=n_jobs,
|
|
731
2315
|
)
|
|
732
|
-
|
|
733
|
-
|