tnfr 3.0.3__py3-none-any.whl → 8.5.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of tnfr might be problematic.

Files changed (360)
  1. tnfr/__init__.py +375 -56
  2. tnfr/__init__.pyi +33 -0
  3. tnfr/_compat.py +10 -0
  4. tnfr/_generated_version.py +34 -0
  5. tnfr/_version.py +49 -0
  6. tnfr/_version.pyi +7 -0
  7. tnfr/alias.py +723 -0
  8. tnfr/alias.pyi +108 -0
  9. tnfr/backends/__init__.py +354 -0
  10. tnfr/backends/jax_backend.py +173 -0
  11. tnfr/backends/numpy_backend.py +238 -0
  12. tnfr/backends/optimized_numpy.py +420 -0
  13. tnfr/backends/torch_backend.py +408 -0
  14. tnfr/cache.py +171 -0
  15. tnfr/cache.pyi +13 -0
  16. tnfr/cli/__init__.py +110 -0
  17. tnfr/cli/__init__.pyi +26 -0
  18. tnfr/cli/arguments.py +489 -0
  19. tnfr/cli/arguments.pyi +29 -0
  20. tnfr/cli/execution.py +914 -0
  21. tnfr/cli/execution.pyi +70 -0
  22. tnfr/cli/interactive_validator.py +614 -0
  23. tnfr/cli/utils.py +51 -0
  24. tnfr/cli/utils.pyi +7 -0
  25. tnfr/cli/validate.py +236 -0
  26. tnfr/compat/__init__.py +85 -0
  27. tnfr/compat/dataclass.py +136 -0
  28. tnfr/compat/jsonschema_stub.py +61 -0
  29. tnfr/compat/matplotlib_stub.py +73 -0
  30. tnfr/compat/numpy_stub.py +155 -0
  31. tnfr/config/__init__.py +224 -0
  32. tnfr/config/__init__.pyi +10 -0
  33. tnfr/config/constants.py +104 -0
  34. tnfr/config/constants.pyi +12 -0
  35. tnfr/config/defaults.py +54 -0
  36. tnfr/config/defaults_core.py +212 -0
  37. tnfr/config/defaults_init.py +33 -0
  38. tnfr/config/defaults_metric.py +104 -0
  39. tnfr/config/feature_flags.py +81 -0
  40. tnfr/config/feature_flags.pyi +16 -0
  41. tnfr/config/glyph_constants.py +31 -0
  42. tnfr/config/init.py +77 -0
  43. tnfr/config/init.pyi +8 -0
  44. tnfr/config/operator_names.py +254 -0
  45. tnfr/config/operator_names.pyi +36 -0
  46. tnfr/config/physics_derivation.py +354 -0
  47. tnfr/config/presets.py +83 -0
  48. tnfr/config/presets.pyi +7 -0
  49. tnfr/config/security.py +927 -0
  50. tnfr/config/thresholds.py +114 -0
  51. tnfr/config/tnfr_config.py +498 -0
  52. tnfr/constants/__init__.py +92 -0
  53. tnfr/constants/__init__.pyi +92 -0
  54. tnfr/constants/aliases.py +33 -0
  55. tnfr/constants/aliases.pyi +27 -0
  56. tnfr/constants/init.py +33 -0
  57. tnfr/constants/init.pyi +12 -0
  58. tnfr/constants/metric.py +104 -0
  59. tnfr/constants/metric.pyi +19 -0
  60. tnfr/core/__init__.py +33 -0
  61. tnfr/core/container.py +226 -0
  62. tnfr/core/default_implementations.py +329 -0
  63. tnfr/core/interfaces.py +279 -0
  64. tnfr/dynamics/__init__.py +238 -0
  65. tnfr/dynamics/__init__.pyi +83 -0
  66. tnfr/dynamics/adaptation.py +267 -0
  67. tnfr/dynamics/adaptation.pyi +7 -0
  68. tnfr/dynamics/adaptive_sequences.py +189 -0
  69. tnfr/dynamics/adaptive_sequences.pyi +14 -0
  70. tnfr/dynamics/aliases.py +23 -0
  71. tnfr/dynamics/aliases.pyi +19 -0
  72. tnfr/dynamics/bifurcation.py +232 -0
  73. tnfr/dynamics/canonical.py +229 -0
  74. tnfr/dynamics/canonical.pyi +48 -0
  75. tnfr/dynamics/coordination.py +385 -0
  76. tnfr/dynamics/coordination.pyi +25 -0
  77. tnfr/dynamics/dnfr.py +3034 -0
  78. tnfr/dynamics/dnfr.pyi +26 -0
  79. tnfr/dynamics/dynamic_limits.py +225 -0
  80. tnfr/dynamics/feedback.py +252 -0
  81. tnfr/dynamics/feedback.pyi +24 -0
  82. tnfr/dynamics/fused_dnfr.py +454 -0
  83. tnfr/dynamics/homeostasis.py +157 -0
  84. tnfr/dynamics/homeostasis.pyi +14 -0
  85. tnfr/dynamics/integrators.py +661 -0
  86. tnfr/dynamics/integrators.pyi +36 -0
  87. tnfr/dynamics/learning.py +310 -0
  88. tnfr/dynamics/learning.pyi +33 -0
  89. tnfr/dynamics/metabolism.py +254 -0
  90. tnfr/dynamics/nbody.py +796 -0
  91. tnfr/dynamics/nbody_tnfr.py +783 -0
  92. tnfr/dynamics/propagation.py +326 -0
  93. tnfr/dynamics/runtime.py +908 -0
  94. tnfr/dynamics/runtime.pyi +77 -0
  95. tnfr/dynamics/sampling.py +36 -0
  96. tnfr/dynamics/sampling.pyi +7 -0
  97. tnfr/dynamics/selectors.py +711 -0
  98. tnfr/dynamics/selectors.pyi +85 -0
  99. tnfr/dynamics/structural_clip.py +207 -0
  100. tnfr/errors/__init__.py +37 -0
  101. tnfr/errors/contextual.py +492 -0
  102. tnfr/execution.py +223 -0
  103. tnfr/execution.pyi +45 -0
  104. tnfr/extensions/__init__.py +205 -0
  105. tnfr/extensions/__init__.pyi +18 -0
  106. tnfr/extensions/base.py +173 -0
  107. tnfr/extensions/base.pyi +35 -0
  108. tnfr/extensions/business/__init__.py +71 -0
  109. tnfr/extensions/business/__init__.pyi +11 -0
  110. tnfr/extensions/business/cookbook.py +88 -0
  111. tnfr/extensions/business/cookbook.pyi +8 -0
  112. tnfr/extensions/business/health_analyzers.py +202 -0
  113. tnfr/extensions/business/health_analyzers.pyi +9 -0
  114. tnfr/extensions/business/patterns.py +183 -0
  115. tnfr/extensions/business/patterns.pyi +8 -0
  116. tnfr/extensions/medical/__init__.py +73 -0
  117. tnfr/extensions/medical/__init__.pyi +11 -0
  118. tnfr/extensions/medical/cookbook.py +88 -0
  119. tnfr/extensions/medical/cookbook.pyi +8 -0
  120. tnfr/extensions/medical/health_analyzers.py +181 -0
  121. tnfr/extensions/medical/health_analyzers.pyi +9 -0
  122. tnfr/extensions/medical/patterns.py +163 -0
  123. tnfr/extensions/medical/patterns.pyi +8 -0
  124. tnfr/flatten.py +262 -0
  125. tnfr/flatten.pyi +21 -0
  126. tnfr/gamma.py +354 -0
  127. tnfr/gamma.pyi +36 -0
  128. tnfr/glyph_history.py +377 -0
  129. tnfr/glyph_history.pyi +35 -0
  130. tnfr/glyph_runtime.py +19 -0
  131. tnfr/glyph_runtime.pyi +8 -0
  132. tnfr/immutable.py +218 -0
  133. tnfr/immutable.pyi +36 -0
  134. tnfr/initialization.py +203 -0
  135. tnfr/initialization.pyi +65 -0
  136. tnfr/io.py +10 -0
  137. tnfr/io.pyi +13 -0
  138. tnfr/locking.py +37 -0
  139. tnfr/locking.pyi +7 -0
  140. tnfr/mathematics/__init__.py +79 -0
  141. tnfr/mathematics/backend.py +453 -0
  142. tnfr/mathematics/backend.pyi +99 -0
  143. tnfr/mathematics/dynamics.py +408 -0
  144. tnfr/mathematics/dynamics.pyi +90 -0
  145. tnfr/mathematics/epi.py +391 -0
  146. tnfr/mathematics/epi.pyi +65 -0
  147. tnfr/mathematics/generators.py +242 -0
  148. tnfr/mathematics/generators.pyi +29 -0
  149. tnfr/mathematics/metrics.py +119 -0
  150. tnfr/mathematics/metrics.pyi +16 -0
  151. tnfr/mathematics/operators.py +239 -0
  152. tnfr/mathematics/operators.pyi +59 -0
  153. tnfr/mathematics/operators_factory.py +124 -0
  154. tnfr/mathematics/operators_factory.pyi +11 -0
  155. tnfr/mathematics/projection.py +87 -0
  156. tnfr/mathematics/projection.pyi +33 -0
  157. tnfr/mathematics/runtime.py +182 -0
  158. tnfr/mathematics/runtime.pyi +64 -0
  159. tnfr/mathematics/spaces.py +256 -0
  160. tnfr/mathematics/spaces.pyi +83 -0
  161. tnfr/mathematics/transforms.py +305 -0
  162. tnfr/mathematics/transforms.pyi +62 -0
  163. tnfr/metrics/__init__.py +79 -0
  164. tnfr/metrics/__init__.pyi +20 -0
  165. tnfr/metrics/buffer_cache.py +163 -0
  166. tnfr/metrics/buffer_cache.pyi +24 -0
  167. tnfr/metrics/cache_utils.py +214 -0
  168. tnfr/metrics/coherence.py +2009 -0
  169. tnfr/metrics/coherence.pyi +129 -0
  170. tnfr/metrics/common.py +158 -0
  171. tnfr/metrics/common.pyi +35 -0
  172. tnfr/metrics/core.py +316 -0
  173. tnfr/metrics/core.pyi +13 -0
  174. tnfr/metrics/diagnosis.py +833 -0
  175. tnfr/metrics/diagnosis.pyi +86 -0
  176. tnfr/metrics/emergence.py +245 -0
  177. tnfr/metrics/export.py +179 -0
  178. tnfr/metrics/export.pyi +7 -0
  179. tnfr/metrics/glyph_timing.py +379 -0
  180. tnfr/metrics/glyph_timing.pyi +81 -0
  181. tnfr/metrics/learning_metrics.py +280 -0
  182. tnfr/metrics/learning_metrics.pyi +21 -0
  183. tnfr/metrics/phase_coherence.py +351 -0
  184. tnfr/metrics/phase_compatibility.py +349 -0
  185. tnfr/metrics/reporting.py +183 -0
  186. tnfr/metrics/reporting.pyi +25 -0
  187. tnfr/metrics/sense_index.py +1203 -0
  188. tnfr/metrics/sense_index.pyi +9 -0
  189. tnfr/metrics/trig.py +373 -0
  190. tnfr/metrics/trig.pyi +13 -0
  191. tnfr/metrics/trig_cache.py +233 -0
  192. tnfr/metrics/trig_cache.pyi +10 -0
  193. tnfr/multiscale/__init__.py +32 -0
  194. tnfr/multiscale/hierarchical.py +517 -0
  195. tnfr/node.py +763 -0
  196. tnfr/node.pyi +139 -0
  197. tnfr/observers.py +255 -130
  198. tnfr/observers.pyi +31 -0
  199. tnfr/ontosim.py +144 -137
  200. tnfr/ontosim.pyi +28 -0
  201. tnfr/operators/__init__.py +1672 -0
  202. tnfr/operators/__init__.pyi +31 -0
  203. tnfr/operators/algebra.py +277 -0
  204. tnfr/operators/canonical_patterns.py +420 -0
  205. tnfr/operators/cascade.py +267 -0
  206. tnfr/operators/cycle_detection.py +358 -0
  207. tnfr/operators/definitions.py +4108 -0
  208. tnfr/operators/definitions.pyi +78 -0
  209. tnfr/operators/grammar.py +1164 -0
  210. tnfr/operators/grammar.pyi +140 -0
  211. tnfr/operators/hamiltonian.py +710 -0
  212. tnfr/operators/health_analyzer.py +809 -0
  213. tnfr/operators/jitter.py +272 -0
  214. tnfr/operators/jitter.pyi +11 -0
  215. tnfr/operators/lifecycle.py +314 -0
  216. tnfr/operators/metabolism.py +618 -0
  217. tnfr/operators/metrics.py +2138 -0
  218. tnfr/operators/network_analysis/__init__.py +27 -0
  219. tnfr/operators/network_analysis/source_detection.py +186 -0
  220. tnfr/operators/nodal_equation.py +395 -0
  221. tnfr/operators/pattern_detection.py +660 -0
  222. tnfr/operators/patterns.py +669 -0
  223. tnfr/operators/postconditions/__init__.py +38 -0
  224. tnfr/operators/postconditions/mutation.py +236 -0
  225. tnfr/operators/preconditions/__init__.py +1226 -0
  226. tnfr/operators/preconditions/coherence.py +305 -0
  227. tnfr/operators/preconditions/dissonance.py +236 -0
  228. tnfr/operators/preconditions/emission.py +128 -0
  229. tnfr/operators/preconditions/mutation.py +580 -0
  230. tnfr/operators/preconditions/reception.py +125 -0
  231. tnfr/operators/preconditions/resonance.py +364 -0
  232. tnfr/operators/registry.py +74 -0
  233. tnfr/operators/registry.pyi +9 -0
  234. tnfr/operators/remesh.py +1809 -0
  235. tnfr/operators/remesh.pyi +26 -0
  236. tnfr/operators/structural_units.py +268 -0
  237. tnfr/operators/unified_grammar.py +105 -0
  238. tnfr/parallel/__init__.py +54 -0
  239. tnfr/parallel/auto_scaler.py +234 -0
  240. tnfr/parallel/distributed.py +384 -0
  241. tnfr/parallel/engine.py +238 -0
  242. tnfr/parallel/gpu_engine.py +420 -0
  243. tnfr/parallel/monitoring.py +248 -0
  244. tnfr/parallel/partitioner.py +459 -0
  245. tnfr/py.typed +0 -0
  246. tnfr/recipes/__init__.py +22 -0
  247. tnfr/recipes/cookbook.py +743 -0
  248. tnfr/rng.py +178 -0
  249. tnfr/rng.pyi +26 -0
  250. tnfr/schemas/__init__.py +8 -0
  251. tnfr/schemas/grammar.json +94 -0
  252. tnfr/sdk/__init__.py +107 -0
  253. tnfr/sdk/__init__.pyi +19 -0
  254. tnfr/sdk/adaptive_system.py +173 -0
  255. tnfr/sdk/adaptive_system.pyi +21 -0
  256. tnfr/sdk/builders.py +370 -0
  257. tnfr/sdk/builders.pyi +51 -0
  258. tnfr/sdk/fluent.py +1121 -0
  259. tnfr/sdk/fluent.pyi +74 -0
  260. tnfr/sdk/templates.py +342 -0
  261. tnfr/sdk/templates.pyi +41 -0
  262. tnfr/sdk/utils.py +341 -0
  263. tnfr/secure_config.py +46 -0
  264. tnfr/security/__init__.py +70 -0
  265. tnfr/security/database.py +514 -0
  266. tnfr/security/subprocess.py +503 -0
  267. tnfr/security/validation.py +290 -0
  268. tnfr/selector.py +247 -0
  269. tnfr/selector.pyi +19 -0
  270. tnfr/sense.py +378 -0
  271. tnfr/sense.pyi +23 -0
  272. tnfr/services/__init__.py +17 -0
  273. tnfr/services/orchestrator.py +325 -0
  274. tnfr/sparse/__init__.py +39 -0
  275. tnfr/sparse/representations.py +492 -0
  276. tnfr/structural.py +705 -0
  277. tnfr/structural.pyi +83 -0
  278. tnfr/telemetry/__init__.py +35 -0
  279. tnfr/telemetry/cache_metrics.py +226 -0
  280. tnfr/telemetry/cache_metrics.pyi +64 -0
  281. tnfr/telemetry/nu_f.py +422 -0
  282. tnfr/telemetry/nu_f.pyi +108 -0
  283. tnfr/telemetry/verbosity.py +36 -0
  284. tnfr/telemetry/verbosity.pyi +15 -0
  285. tnfr/tokens.py +58 -0
  286. tnfr/tokens.pyi +36 -0
  287. tnfr/tools/__init__.py +20 -0
  288. tnfr/tools/domain_templates.py +478 -0
  289. tnfr/tools/sequence_generator.py +846 -0
  290. tnfr/topology/__init__.py +13 -0
  291. tnfr/topology/asymmetry.py +151 -0
  292. tnfr/trace.py +543 -0
  293. tnfr/trace.pyi +42 -0
  294. tnfr/tutorials/__init__.py +38 -0
  295. tnfr/tutorials/autonomous_evolution.py +285 -0
  296. tnfr/tutorials/interactive.py +1576 -0
  297. tnfr/tutorials/structural_metabolism.py +238 -0
  298. tnfr/types.py +775 -0
  299. tnfr/types.pyi +357 -0
  300. tnfr/units.py +68 -0
  301. tnfr/units.pyi +13 -0
  302. tnfr/utils/__init__.py +282 -0
  303. tnfr/utils/__init__.pyi +215 -0
  304. tnfr/utils/cache.py +4223 -0
  305. tnfr/utils/cache.pyi +470 -0
  306. tnfr/utils/callbacks.py +375 -0
  307. tnfr/utils/callbacks.pyi +49 -0
  308. tnfr/utils/chunks.py +108 -0
  309. tnfr/utils/chunks.pyi +22 -0
  310. tnfr/utils/data.py +428 -0
  311. tnfr/utils/data.pyi +74 -0
  312. tnfr/utils/graph.py +85 -0
  313. tnfr/utils/graph.pyi +10 -0
  314. tnfr/utils/init.py +821 -0
  315. tnfr/utils/init.pyi +80 -0
  316. tnfr/utils/io.py +559 -0
  317. tnfr/utils/io.pyi +66 -0
  318. tnfr/utils/numeric.py +114 -0
  319. tnfr/utils/numeric.pyi +21 -0
  320. tnfr/validation/__init__.py +257 -0
  321. tnfr/validation/__init__.pyi +85 -0
  322. tnfr/validation/compatibility.py +460 -0
  323. tnfr/validation/compatibility.pyi +6 -0
  324. tnfr/validation/config.py +73 -0
  325. tnfr/validation/graph.py +139 -0
  326. tnfr/validation/graph.pyi +18 -0
  327. tnfr/validation/input_validation.py +755 -0
  328. tnfr/validation/invariants.py +712 -0
  329. tnfr/validation/rules.py +253 -0
  330. tnfr/validation/rules.pyi +44 -0
  331. tnfr/validation/runtime.py +279 -0
  332. tnfr/validation/runtime.pyi +28 -0
  333. tnfr/validation/sequence_validator.py +162 -0
  334. tnfr/validation/soft_filters.py +170 -0
  335. tnfr/validation/soft_filters.pyi +32 -0
  336. tnfr/validation/spectral.py +164 -0
  337. tnfr/validation/spectral.pyi +42 -0
  338. tnfr/validation/validator.py +1266 -0
  339. tnfr/validation/window.py +39 -0
  340. tnfr/validation/window.pyi +1 -0
  341. tnfr/visualization/__init__.py +98 -0
  342. tnfr/visualization/cascade_viz.py +256 -0
  343. tnfr/visualization/hierarchy.py +284 -0
  344. tnfr/visualization/sequence_plotter.py +784 -0
  345. tnfr/viz/__init__.py +60 -0
  346. tnfr/viz/matplotlib.py +278 -0
  347. tnfr/viz/matplotlib.pyi +35 -0
  348. tnfr-8.5.0.dist-info/METADATA +573 -0
  349. tnfr-8.5.0.dist-info/RECORD +353 -0
  350. tnfr-8.5.0.dist-info/entry_points.txt +3 -0
  351. tnfr-3.0.3.dist-info/licenses/LICENSE.txt → tnfr-8.5.0.dist-info/licenses/LICENSE.md +1 -1
  352. tnfr/constants.py +0 -183
  353. tnfr/dynamics.py +0 -543
  354. tnfr/helpers.py +0 -198
  355. tnfr/main.py +0 -37
  356. tnfr/operators.py +0 -296
  357. tnfr-3.0.3.dist-info/METADATA +0 -35
  358. tnfr-3.0.3.dist-info/RECORD +0 -13
  359. {tnfr-3.0.3.dist-info → tnfr-8.5.0.dist-info}/WHEEL +0 -0
  360. {tnfr-3.0.3.dist-info → tnfr-8.5.0.dist-info}/top_level.txt +0 -0
tnfr/alias.py ADDED
@@ -0,0 +1,723 @@
+"""Attribute helpers supporting alias keys.
+
+``AliasAccessor`` provides the main implementation for dealing with
+alias-based attribute access. Legacy wrappers ``alias_get`` and
+``alias_set`` have been removed; use :func:`get_attr` and
+:func:`set_attr` instead.
+
+CRITICAL: Canonical Attribute Access
+====================================
+
+**ALWAYS use the alias system for reading/writing TNFR attributes.**
+
+The TNFR canonical attribute keys use Unicode symbols (e.g., 'νf' for structural
+frequency), but NetworkX and Python code often use ASCII equivalents (e.g., 'vf').
+This creates a critical inconsistency:
+
+**WRONG (breaks canonicity)**:
+>>> G.add_node(0, vf=1.0)  # Uses ASCII 'vf' key
+>>> value = G.nodes[0]['vf']  # Reads ASCII 'vf' - may not exist!
+>>> G.nodes[0]['vf'] = 2.0  # Writes ASCII 'vf' - wrong key!
+
+**CORRECT (maintains canonicity)**:
+>>> from tnfr.alias import set_vf, get_attr
+>>> from tnfr.constants.aliases import ALIAS_VF
+>>> from tnfr.constants import VF_PRIMARY
+>>>
+>>> # For initialization, use canonical setters:
+>>> set_vf(G, 0, 1.0)  # Writes to 'νf' (Greek nu)
+>>>
+>>> # For reading, use canonical getters:
+>>> value = get_attr(G.nodes[0], ALIAS_VF, 0.0)  # Reads from 'νf'
+>>>
+>>> # Or use PRIMARY constants in add_node:
+>>> G.add_node(1, **{VF_PRIMARY: 1.0})  # Writes to 'νf' directly
+
+**Why This Matters**:
+- The alias system tries ALL aliases in order: ('νf', 'nu_f', 'nu-f', 'nu', 'freq', 'frequency')
+- If you write to 'vf', the data is stored under a key NOT in the alias list
+- Reading via get_attr() will return the default (0.0) instead of your value
+- This breaks the nodal equation: ∂EPI/∂t = νf · ΔNFR(t)
+
+**For Tests**:
+>>> from tnfr.structural import create_nfr
+>>> # PREFERRED: Use create_nfr which handles canonicity
+>>> G, node = create_nfr("test", vf=1.0, epi=0.5, theta=0.0)
+>>>
+>>> # ALTERNATIVE: Manual initialization with canonical setters
+>>> from tnfr.alias import set_vf, get_attr
+>>> from tnfr.constants.aliases import ALIAS_VF
+>>> G = nx.Graph()
+>>> G.add_node(0, theta=0.0, EPI=1.0, Si=0.5)  # Other attrs OK
+>>> set_vf(G, 0, 1.0)  # Use canonical setter for vf
+>>> value = get_attr(G.nodes[0], ALIAS_VF, 0.0)  # Use canonical getter
+
+**Applies to**: νf (vf), θ (theta), ΔNFR (dnfr), and other aliased attributes.
+See ALIAS_VF, ALIAS_THETA, ALIAS_DNFR in tnfr.constants.aliases for full lists.
+"""
+
+from __future__ import annotations
+
+from collections import defaultdict
+from collections.abc import Iterable, Mapping, MutableMapping, Sized
+from functools import lru_cache, partial
+from threading import Lock
+from types import ModuleType
+from typing import (
+    TYPE_CHECKING,
+    Any,
+    Callable,
+    Generic,
+    Hashable,
+    Optional,
+    TypeVar,
+    cast,
+)
+
+from .compat.dataclass import dataclass
+from .constants.aliases import ALIAS_DNFR, ALIAS_THETA, ALIAS_VF
+from .types import FloatArray, NodeId
+from .utils import convert_value
+
+if TYPE_CHECKING:  # pragma: no cover
+    import networkx
+
+T = TypeVar("T")
+
+
+def _bepi_to_float(value: Any) -> float:
+    """Extract scalar from BEPIElement dict or convert value to float.
+
+    When operators transform EPI from float to BEPIElement dict, this helper
+    extracts the maximum magnitude from the 'continuous' component. This
+    preserves ΔNFR semantics (§3.3) and structural metrics accuracy (§3.9).
+
+    Parameters
+    ----------
+    value : Any
+        Value to convert. If it's a dict with a 'continuous' key, extracts
+        the maximum magnitude. Otherwise converts directly to float.
+
+    Returns
+    -------
+    float
+        Scalar representation of the value.
+    """
+    if isinstance(value, dict) and "continuous" in value:
+        cont = value["continuous"]
+        if isinstance(cont, tuple):
+            return float(max(abs(c) for c in cont)) if cont else 0.0
+        return float(abs(cont))
+    return float(value)
+
+
+@lru_cache(maxsize=128)
+def _alias_cache(alias_tuple: tuple[str, ...]) -> tuple[str, ...]:
+    """Validate and cache alias tuples.
+
+    ``functools.lru_cache`` protects this function with an internal lock,
+    which is sufficient for thread-safe access; no explicit locking is
+    required.
+    """
+    if not alias_tuple:
+        raise ValueError("'aliases' must contain at least one key")
+    if not all(isinstance(a, str) for a in alias_tuple):
+        raise TypeError("'aliases' elements must be strings")
+    return alias_tuple
+
+
+class AliasAccessor(Generic[T]):
+    """Helper providing ``get`` and ``set`` for alias-based attributes.
+
+    This class implements all logic for resolving and assigning values
+    using alias keys. Helper functions :func:`get_attr` and
+    :func:`set_attr` delegate to a module-level instance of this class.
+    """
+
+    def __init__(
+        self, conv: Callable[[Any], T] | None = None, default: T | None = None
+    ) -> None:
+        self._conv = conv
+        self._default = default
+        # expose cache for testing and manual control
+        self._alias_cache = _alias_cache
+        self._key_cache: dict[tuple[int, tuple[str, ...]], tuple[str, int]] = {}
+        self._lock = Lock()
+
+    def _prepare(
+        self,
+        aliases: Iterable[str],
+        conv: Callable[[Any], T] | None,
+        default: Optional[T] = None,
+    ) -> tuple[tuple[str, ...], Callable[[Any], T], Optional[T]]:
+        """Validate ``aliases`` and resolve ``conv`` and ``default``.
+
+        Parameters
+        ----------
+        aliases:
+            Iterable of alias strings. Must not be a single string.
+        conv:
+            Conversion callable. If ``None``, the accessor's default
+            converter is used.
+        default:
+            Default value to use if no alias is found. If ``None``, the
+            accessor's default is used.
+        """
+
+        if isinstance(aliases, str) or not isinstance(aliases, Iterable):
+            raise TypeError("'aliases' must be a non-string iterable")
+        aliases = _alias_cache(tuple(aliases))
+        if conv is None:
+            conv = self._conv
+        if conv is None:
+            raise TypeError("'conv' must be provided")
+        if default is None:
+            default = self._default
+        return aliases, conv, default
+
+    def _resolve_cache_key(
+        self, d: dict[str, Any], aliases: tuple[str, ...]
+    ) -> tuple[tuple[int, tuple[str, ...]], str | None]:
+        """Return cache entry for ``d`` and ``aliases`` if still valid.
+
+        The mapping remains coherent only when the cached key exists in
+        ``d`` and the dictionary size has not changed. Invalid entries are
+        removed to preserve structural consistency.
+        """
+
+        cache_key = (id(d), aliases)
+        with self._lock:
+            cached = self._key_cache.get(cache_key)
+        if cached is not None:
+            key, size = cached
+            if size == len(d) and key in d:
+                return cache_key, key
+        with self._lock:
+            self._key_cache.pop(cache_key, None)
+        return cache_key, None
+
+    def get(
+        self,
+        d: dict[str, Any],
+        aliases: Iterable[str],
+        default: Optional[T] = None,
+        *,
+        strict: bool = False,
+        log_level: int | None = None,
+        conv: Callable[[Any], T] | None = None,
+    ) -> Optional[T]:
+        """Return ``value`` for the first alias present in ``d``."""
+
+        aliases, conv, default = self._prepare(aliases, conv, default)
+        cache_key, key = self._resolve_cache_key(d, aliases)
+        if key is not None:
+            ok, value = convert_value(
+                d[key], conv, strict=strict, key=key, log_level=log_level
+            )
+            if ok:
+                return value
+        for key in aliases:
+            if key in d:
+                ok, value = convert_value(
+                    d[key], conv, strict=strict, key=key, log_level=log_level
+                )
+                if ok:
+                    with self._lock:
+                        self._key_cache[cache_key] = (key, len(d))
+                    return value
+        if default is not None:
+            ok, value = convert_value(
+                default,
+                conv,
+                strict=strict,
+                key="default",
+                log_level=log_level,
+            )
+            if ok:
+                return value
+        return None
+
+    def set(
+        self,
+        d: dict[str, Any],
+        aliases: Iterable[str],
+        value: Any,
+        conv: Callable[[Any], T] | None = None,
+    ) -> T:
+        """Write ``value`` under the first matching alias and cache the choice."""
+
+        aliases, conv, _ = self._prepare(aliases, conv)
+        cache_key, key = self._resolve_cache_key(d, aliases)
+        if key is not None:
+            d[key] = conv(value)
+            return d[key]
+        key = next((k for k in aliases if k in d), aliases[0])
+        val = conv(value)
+        d[key] = val
+        with self._lock:
+            self._key_cache[cache_key] = (key, len(d))
+        return val
+
+
+_generic_accessor: AliasAccessor[Any] = AliasAccessor()
+
+
+def get_theta_attr(
+    d: Mapping[str, Any],
+    default: T | None = None,
+    *,
+    strict: bool = False,
+    log_level: int | None = None,
+    conv: Callable[[Any], T] = float,
+) -> T | None:
+    """Return ``theta``/``phase`` using the English alias set."""
+    return _generic_accessor.get(
+        cast(dict[str, Any], d),
+        ALIAS_THETA,
+        default,
+        strict=strict,
+        log_level=log_level,
+        conv=conv,
+    )
+
+
+def get_attr(
+    d: dict[str, Any],
+    aliases: Iterable[str],
+    default: T | None = None,
+    *,
+    strict: bool = False,
+    log_level: int | None = None,
+    conv: Callable[[Any], T] = _bepi_to_float,
+) -> T | None:
+    """Return the value for the first key in ``aliases`` found in ``d``.
+
+    WARNING: This function searches for keys in alias order. If you manually
+    wrote to a non-canonical key (e.g., 'vf' instead of 'νf'), this function
+    will NOT find it and will return the default value instead.
+
+    For structural frequency: ALWAYS use set_vf() to write, not d['vf'] = value.
+    See module docstring for detailed guidance on canonical attribute access.
+    """
+
+    return _generic_accessor.get(
+        d,
+        aliases,
+        default=default,
+        strict=strict,
+        log_level=log_level,
+        conv=conv,
+    )
+
+
+def collect_attr(
+    G: "networkx.Graph",
+    nodes: Iterable[NodeId],
+    aliases: Iterable[str],
+    default: float = 0.0,
+    *,
+    np: ModuleType | None = None,
+) -> FloatArray | list[float]:
+    """Collect attribute values for ``nodes`` from ``G`` using ``aliases``.
+
+    Parameters
+    ----------
+    G:
+        Graph containing node attribute mappings.
+    nodes:
+        Iterable of node identifiers to query.
+    aliases:
+        Sequence of alias keys passed to :func:`get_attr`.
+    default:
+        Fallback value when no alias is found for a node.
+    np:
+        Optional NumPy module. When provided, the result is returned as a
+        NumPy array of ``float``; otherwise a Python ``list`` is returned.
+
+    Returns
+    -------
+    list or numpy.ndarray
+        Collected attribute values in the same order as ``nodes``.
+    """
+
+    def _nodes_iter_and_size(nodes: Iterable[NodeId]) -> tuple[Iterable[NodeId], int]:
+        if nodes is G.nodes:
+            return G.nodes, G.number_of_nodes()
+        if isinstance(nodes, Sized):
+            return nodes, len(nodes)  # type: ignore[arg-type]
+        nodes_list = list(nodes)
+        return nodes_list, len(nodes_list)
+
+    nodes_iter, size = _nodes_iter_and_size(nodes)
+
+    def _value(node: NodeId) -> float:
+        return float(get_attr(G.nodes[node], aliases, default))
+
+    if np is not None:
+        values: FloatArray = np.fromiter(
+            (_value(n) for n in nodes_iter), float, count=size
+        )
+        return values
+    return [_value(n) for n in nodes_iter]
+
+
+def collect_theta_attr(
+    G: "networkx.Graph",
+    nodes: Iterable[NodeId],
+    default: float = 0.0,
+    *,
+    np: ModuleType | None = None,
+) -> FloatArray | list[float]:
+    """Collect ``theta`` values honouring the English-only attribute contract."""
+
+    def _nodes_iter_and_size(nodes: Iterable[NodeId]) -> tuple[Iterable[NodeId], int]:
+        if nodes is G.nodes:
+            return G.nodes, G.number_of_nodes()
+        if isinstance(nodes, Sized):
+            return nodes, len(nodes)  # type: ignore[arg-type]
+        nodes_list = list(nodes)
+        return nodes_list, len(nodes_list)
+
+    nodes_iter, size = _nodes_iter_and_size(nodes)
+
+    def _value(node: NodeId) -> float:
+        return float(get_theta_attr(G.nodes[node], default))
+
+    if np is not None:
+        values: FloatArray = np.fromiter(
+            (_value(n) for n in nodes_iter), float, count=size
+        )
+        return values
+
+    return [_value(n) for n in nodes_iter]
+
+
+def set_attr_generic(
+    d: dict[str, Any],
+    aliases: Iterable[str],
+    value: Any,
+    *,
+    conv: Callable[[Any], T],
+) -> T:
+    """Assign ``value`` to the FIRST (canonical) alias key in ``aliases``.
+
+    CRITICAL: This function writes to the FIRST key in the alias tuple.
+    For ALIAS_VF = ('νf', 'nu_f', ...), this writes to 'νf' (Greek nu), NOT 'vf'.
+
+    If you later try to read with G.nodes[n]['vf'], you will NOT find the value.
+    ALWAYS use get_attr() to read what set_attr() wrote.
+
+    For high-level usage, prefer set_vf(), set_theta(), etc. which handle this correctly.
+    See module docstring for detailed guidance on canonical attribute access.
+    """
+
+    return _generic_accessor.set(d, aliases, value, conv=conv)
+
+
+set_attr = partial(set_attr_generic, conv=float)
+
+get_attr_str = partial(get_attr, conv=str)
+set_attr_str = partial(set_attr_generic, conv=str)
+
+
+def set_theta_attr(d: MutableMapping[str, Any], value: Any) -> float:
+    """Assign ``theta``/``phase`` using the English attribute names."""
+    result = float(value)
+    d["theta"] = result
+    d["phase"] = result
+    return result
+
+
+# -------------------------
+# Cached global maxima
+# -------------------------
+
+
+@dataclass(slots=True)
+class AbsMaxResult:
+    """Absolute maximum value and the node where it occurs."""
+
+    max_value: float
+    node: Hashable | None
+
+
+def _coerce_abs_value(value: Any) -> float:
+    """Return ``value`` as ``float`` treating ``None`` as ``0.0``."""
+
+    if value is None:
+        return 0.0
+    try:
+        return _bepi_to_float(value)
+    except (TypeError, ValueError):
+        return 0.0
+
+
+def _compute_abs_max_result(
+    G: "networkx.Graph",
+    aliases: tuple[str, ...],
+    *,
+    key: str | None = None,
+    candidate: tuple[Hashable, float] | None = None,
+) -> AbsMaxResult:
+    """Return the absolute maximum (and node) for ``aliases``.
+
+    Parameters
+    ----------
+    G:
+        Graph containing nodal data.
+    aliases:
+        Attribute aliases to inspect.
+    key:
+        Cache key to update. When ``None``, the graph cache is untouched.
+    candidate:
+        Optional ``(node, value)`` pair representing a candidate maximum.
+
+    Returns
+    -------
+    AbsMaxResult
+        Structure holding the absolute maximum and the node where it
+        occurs. When ``candidate`` is provided, its value is treated as the
+        current maximum and no recomputation is performed.
+    """
+
+    if candidate is not None:
+        node, value = candidate
+        max_val = abs(float(value))
+    else:
+        node, max_val = max(
+            ((n, abs(get_attr(G.nodes[n], aliases, 0.0))) for n in G.nodes()),
+            key=lambda item: item[1],
+            default=(None, 0.0),
+        )
+        max_val = float(max_val)
+
+    if key is not None:
+        G.graph[key] = max_val
+        G.graph[f"{key}_node"] = node
+
+    return AbsMaxResult(max_value=max_val, node=node)
+
+
+def multi_recompute_abs_max(
+    G: "networkx.Graph", alias_map: dict[str, tuple[str, ...]]
+) -> dict[str, float]:
+    """Return absolute maxima for each entry in ``alias_map``.
+
+    ``G`` is a :class:`networkx.Graph`. ``alias_map`` maps result keys to
+    alias tuples. The graph is traversed once and the absolute maximum for
+    each alias tuple is recorded. The returned dictionary uses the same
+    keys as ``alias_map``.
+    """
+
+    maxima: defaultdict[str, float] = defaultdict(float)
+    items = list(alias_map.items())
+    for _, nd in G.nodes(data=True):
+        maxima.update(
+            {
+                key: max(maxima[key], abs(get_attr(nd, aliases, 0.0)))
+                for key, aliases in items
+            }
+        )
+    return {k: float(v) for k, v in maxima.items()}
+
+
+def _update_cached_abs_max(
+    G: "networkx.Graph",
+    aliases: tuple[str, ...],
+    n: Hashable,
+    value: float,
+    *,
+    key: str,
+) -> AbsMaxResult:
+    """Update cached absolute maxima for ``aliases``.
+
+    The current cached value is updated when ``value`` becomes the new
+    maximum or when the stored node matches ``n`` but its magnitude
+    decreases. The returned :class:`AbsMaxResult` always reflects the
+    cached maximum after applying the update.
+    """
+
+    node_key = f"{key}_node"
+    val = abs(float(value))
+    cur = _coerce_abs_value(G.graph.get(key))
+    cur_node = cast(Hashable | None, G.graph.get(node_key))
+
+    if val >= cur:
+        return _compute_abs_max_result(G, aliases, key=key, candidate=(n, val))
+    if cur_node == n:
+        return _compute_abs_max_result(G, aliases, key=key)
+    return AbsMaxResult(max_value=cur, node=cur_node)
+
+
+def set_attr_and_cache(
+    G: "networkx.Graph",
+    n: Hashable,
+    aliases: tuple[str, ...],
+    value: float,
+    *,
+    cache: str | None = None,
+    extra: Callable[["networkx.Graph", Hashable, float], None] | None = None,
+) -> AbsMaxResult | None:
+    """Assign ``value`` to node ``n`` and optionally update cached maxima.
+
+    Returns
+    -------
+    AbsMaxResult | None
+        Absolute maximum information when ``cache`` is provided; otherwise
+        ``None``.
+    """
+
+    val = set_attr(G.nodes[n], aliases, value)
+    result: AbsMaxResult | None = None
+    if cache is not None:
+        result = _update_cached_abs_max(G, aliases, n, val, key=cache)
+    if extra is not None:
+        extra(G, n, val)
+    return result


+def set_attr_with_max(
+    G: "networkx.Graph",
+    n: Hashable,
+    aliases: tuple[str, ...],
+    value: float,
+    *,
+    cache: str,
+) -> AbsMaxResult:
+    """Assign ``value`` to node ``n`` and update the global maximum.
+
+    This is a convenience wrapper around :func:`set_attr_and_cache`.
+    """
+    return cast(
+        AbsMaxResult,
+        set_attr_and_cache(G, n, aliases, value, cache=cache),
+    )
+
+
+def set_scalar(
+    G: "networkx.Graph",
+    n: Hashable,
+    alias: tuple[str, ...],
+    value: float,
+    *,
+    cache: str | None = None,
+    extra: Callable[["networkx.Graph", Hashable, float], None] | None = None,
+) -> AbsMaxResult | None:
+    """Assign ``value`` to ``alias`` for node ``n`` and update caches.
+
+    Returns
+    -------
+    AbsMaxResult | None
+        Updated absolute maximum details when ``cache`` is provided.
+    """
+
+    return set_attr_and_cache(G, n, alias, value, cache=cache, extra=extra)
+
+
+def _increment_trig_version(G: "networkx.Graph", _: Hashable, __: float) -> None:
+    """Increment cached trig version to invalidate trig caches."""
+    g = G.graph
+    g["_trig_version"] = int(g.get("_trig_version", 0)) + 1
+
+
+SCALAR_SETTERS: dict[str, dict[str, Any]] = {
+    "vf": {
+        "alias": ALIAS_VF,
+        "cache": "_vfmax",
+        "doc": "Set ``νf`` for node ``n`` and optionally update the global maximum.",
+        "update_max_param": True,
+    },
+    "dnfr": {
+        "alias": ALIAS_DNFR,
+        "cache": "_dnfrmax",
+        "doc": "Set ``ΔNFR`` for node ``n`` and update the global maximum.",
+    },
+    "theta": {
+        "alias": ALIAS_THETA,
+        "extra": _increment_trig_version,
+        "doc": "Set ``theta`` for node ``n`` and invalidate trig caches.",
+    },
+}
+
+
+def _make_scalar_setter(
+    name: str, spec: dict[str, Any]
+) -> Callable[..., AbsMaxResult | None]:
+    alias = spec["alias"]
+    cache = spec.get("cache")
+    extra = spec.get("extra")
+    doc = spec.get("doc")
+    has_update = spec.get("update_max_param", False)
+
+    if has_update:
+
+        def setter(
+            G: "networkx.Graph",
+            n: Hashable,
+            value: float,
+            *,
+            update_max: bool = True,
+        ) -> AbsMaxResult | None:
+            cache_key = cache if update_max else None
+            return set_scalar(G, n, alias, value, cache=cache_key, extra=extra)
+
+    else:
+
+        def setter(
+            G: "networkx.Graph", n: Hashable, value: float
+        ) -> AbsMaxResult | None:
+            return set_scalar(G, n, alias, value, cache=cache, extra=extra)
+
+    setter.__name__ = f"set_{name}"
+    setter.__qualname__ = f"set_{name}"
+    setter.__doc__ = doc
+    return setter
+
+
+for _name, _spec in SCALAR_SETTERS.items():
+    globals()[f"set_{_name}"] = _make_scalar_setter(_name, _spec)
+
+del _name, _spec, _make_scalar_setter
+
+_set_theta_impl = cast(
+    Callable[["networkx.Graph", Hashable, float], AbsMaxResult | None],
+    globals()["set_theta"],
+)
+
+
+def _set_theta_with_compat(
+    G: "networkx.Graph", n: Hashable, value: float
+) -> AbsMaxResult | None:
+    nd = cast(MutableMapping[str, Any], G.nodes[n])
+    result = _set_theta_impl(G, n, value)
+    theta_val = get_theta_attr(nd, value)
+    if theta_val is not None:
+        float_theta = float(theta_val)
+        nd["theta"] = float_theta
+        nd["phase"] = float_theta
+    return result
+
+
+_set_theta_with_compat.__name__ = "set_theta"
+_set_theta_with_compat.__qualname__ = "set_theta"
+_set_theta_with_compat.__doc__ = _set_theta_impl.__doc__
+globals()["set_theta"] = _set_theta_with_compat
+
+__all__ = [
+    "AbsMaxResult",
+    "set_attr_generic",
+    "get_attr",
+    "get_theta_attr",
+    "collect_attr",
+    "collect_theta_attr",
+    "set_attr",
+    "get_attr_str",
+    "set_attr_str",
+    "set_theta_attr",
+    "set_attr_and_cache",
+    "set_attr_with_max",
+    "set_scalar",
+    "SCALAR_SETTERS",
+    *[f"set_{name}" for name in SCALAR_SETTERS],
+    "multi_recompute_abs_max",
+]
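
Usage sketch (not part of the diff): the snippet below exercises the alias helpers added above end to end, following the guidance in the module docstring. It is a minimal sketch, assuming tnfr 8.5.0 is installed alongside networkx and that set_vf, get_attr, collect_attr, multi_recompute_abs_max, and ALIAS_VF are importable exactly as shown in the new module; the three-node toy graph and the 0.5 increments are illustrative only.

import networkx as nx

from tnfr.alias import collect_attr, get_attr, multi_recompute_abs_max, set_vf
from tnfr.constants.aliases import ALIAS_VF

G = nx.Graph()
G.add_nodes_from(range(3))

# Write through the generated canonical setter: the value lands under the
# first key in ALIAS_VF ('νf'), and the cached global maximum is refreshed.
for n in G.nodes:
    set_vf(G, n, 0.5 * (n + 1))

# Read back through the alias-aware getter; a raw G.nodes[0]['vf'] lookup
# would miss the canonical key, which is exactly what the docstring warns about.
assert get_attr(G.nodes[0], ALIAS_VF, 0.0) == 0.5

# Bulk collection preserves node order; pass np=numpy to get an ndarray back.
assert collect_attr(G, G.nodes, ALIAS_VF, 0.0) == [0.5, 1.0, 1.5]

# A single graph traversal recomputes the absolute maximum per alias tuple.
assert multi_recompute_abs_max(G, {"vf": tuple(ALIAS_VF)}) == {"vf": 1.5}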