tnfr 4.5.1-py3-none-any.whl → 6.0.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (170)
  1. tnfr/__init__.py +270 -90
  2. tnfr/__init__.pyi +40 -0
  3. tnfr/_compat.py +11 -0
  4. tnfr/_version.py +7 -0
  5. tnfr/_version.pyi +7 -0
  6. tnfr/alias.py +631 -0
  7. tnfr/alias.pyi +140 -0
  8. tnfr/cache.py +732 -0
  9. tnfr/cache.pyi +232 -0
  10. tnfr/callback_utils.py +381 -0
  11. tnfr/callback_utils.pyi +105 -0
  12. tnfr/cli/__init__.py +89 -0
  13. tnfr/cli/__init__.pyi +47 -0
  14. tnfr/cli/arguments.py +199 -0
  15. tnfr/cli/arguments.pyi +33 -0
  16. tnfr/cli/execution.py +322 -0
  17. tnfr/cli/execution.pyi +80 -0
  18. tnfr/cli/utils.py +34 -0
  19. tnfr/cli/utils.pyi +8 -0
  20. tnfr/config/__init__.py +12 -0
  21. tnfr/config/__init__.pyi +8 -0
  22. tnfr/config/constants.py +104 -0
  23. tnfr/config/constants.pyi +12 -0
  24. tnfr/config/init.py +36 -0
  25. tnfr/config/init.pyi +8 -0
  26. tnfr/config/operator_names.py +106 -0
  27. tnfr/config/operator_names.pyi +28 -0
  28. tnfr/config/presets.py +104 -0
  29. tnfr/config/presets.pyi +7 -0
  30. tnfr/constants/__init__.py +228 -0
  31. tnfr/constants/__init__.pyi +104 -0
  32. tnfr/constants/core.py +158 -0
  33. tnfr/constants/core.pyi +17 -0
  34. tnfr/constants/init.py +31 -0
  35. tnfr/constants/init.pyi +12 -0
  36. tnfr/constants/metric.py +102 -0
  37. tnfr/constants/metric.pyi +19 -0
  38. tnfr/constants_glyphs.py +16 -0
  39. tnfr/constants_glyphs.pyi +12 -0
  40. tnfr/dynamics/__init__.py +136 -0
  41. tnfr/dynamics/__init__.pyi +83 -0
  42. tnfr/dynamics/adaptation.py +201 -0
  43. tnfr/dynamics/aliases.py +22 -0
  44. tnfr/dynamics/coordination.py +343 -0
  45. tnfr/dynamics/dnfr.py +2315 -0
  46. tnfr/dynamics/dnfr.pyi +33 -0
  47. tnfr/dynamics/integrators.py +561 -0
  48. tnfr/dynamics/integrators.pyi +35 -0
  49. tnfr/dynamics/runtime.py +521 -0
  50. tnfr/dynamics/sampling.py +34 -0
  51. tnfr/dynamics/sampling.pyi +7 -0
  52. tnfr/dynamics/selectors.py +680 -0
  53. tnfr/execution.py +216 -0
  54. tnfr/execution.pyi +65 -0
  55. tnfr/flatten.py +283 -0
  56. tnfr/flatten.pyi +28 -0
  57. tnfr/gamma.py +320 -89
  58. tnfr/gamma.pyi +40 -0
  59. tnfr/glyph_history.py +337 -0
  60. tnfr/glyph_history.pyi +53 -0
  61. tnfr/grammar.py +23 -153
  62. tnfr/grammar.pyi +13 -0
  63. tnfr/helpers/__init__.py +151 -0
  64. tnfr/helpers/__init__.pyi +66 -0
  65. tnfr/helpers/numeric.py +88 -0
  66. tnfr/helpers/numeric.pyi +12 -0
  67. tnfr/immutable.py +214 -0
  68. tnfr/immutable.pyi +37 -0
  69. tnfr/initialization.py +199 -0
  70. tnfr/initialization.pyi +73 -0
  71. tnfr/io.py +311 -0
  72. tnfr/io.pyi +11 -0
  73. tnfr/locking.py +37 -0
  74. tnfr/locking.pyi +7 -0
  75. tnfr/metrics/__init__.py +41 -0
  76. tnfr/metrics/__init__.pyi +20 -0
  77. tnfr/metrics/coherence.py +1469 -0
  78. tnfr/metrics/common.py +149 -0
  79. tnfr/metrics/common.pyi +15 -0
  80. tnfr/metrics/core.py +259 -0
  81. tnfr/metrics/core.pyi +13 -0
  82. tnfr/metrics/diagnosis.py +840 -0
  83. tnfr/metrics/diagnosis.pyi +89 -0
  84. tnfr/metrics/export.py +151 -0
  85. tnfr/metrics/glyph_timing.py +369 -0
  86. tnfr/metrics/reporting.py +152 -0
  87. tnfr/metrics/reporting.pyi +12 -0
  88. tnfr/metrics/sense_index.py +294 -0
  89. tnfr/metrics/sense_index.pyi +9 -0
  90. tnfr/metrics/trig.py +216 -0
  91. tnfr/metrics/trig.pyi +12 -0
  92. tnfr/metrics/trig_cache.py +105 -0
  93. tnfr/metrics/trig_cache.pyi +10 -0
  94. tnfr/node.py +255 -177
  95. tnfr/node.pyi +161 -0
  96. tnfr/observers.py +154 -150
  97. tnfr/observers.pyi +46 -0
  98. tnfr/ontosim.py +135 -134
  99. tnfr/ontosim.pyi +33 -0
  100. tnfr/operators/__init__.py +452 -0
  101. tnfr/operators/__init__.pyi +31 -0
  102. tnfr/operators/definitions.py +181 -0
  103. tnfr/operators/definitions.pyi +92 -0
  104. tnfr/operators/jitter.py +266 -0
  105. tnfr/operators/jitter.pyi +11 -0
  106. tnfr/operators/registry.py +80 -0
  107. tnfr/operators/registry.pyi +15 -0
  108. tnfr/operators/remesh.py +569 -0
  109. tnfr/presets.py +10 -23
  110. tnfr/presets.pyi +7 -0
  111. tnfr/py.typed +0 -0
  112. tnfr/rng.py +440 -0
  113. tnfr/rng.pyi +14 -0
  114. tnfr/selector.py +217 -0
  115. tnfr/selector.pyi +19 -0
  116. tnfr/sense.py +307 -142
  117. tnfr/sense.pyi +30 -0
  118. tnfr/structural.py +69 -164
  119. tnfr/structural.pyi +46 -0
  120. tnfr/telemetry/__init__.py +13 -0
  121. tnfr/telemetry/verbosity.py +37 -0
  122. tnfr/tokens.py +61 -0
  123. tnfr/tokens.pyi +41 -0
  124. tnfr/trace.py +520 -95
  125. tnfr/trace.pyi +68 -0
  126. tnfr/types.py +382 -17
  127. tnfr/types.pyi +145 -0
  128. tnfr/utils/__init__.py +158 -0
  129. tnfr/utils/__init__.pyi +133 -0
  130. tnfr/utils/cache.py +755 -0
  131. tnfr/utils/cache.pyi +156 -0
  132. tnfr/utils/data.py +267 -0
  133. tnfr/utils/data.pyi +73 -0
  134. tnfr/utils/graph.py +87 -0
  135. tnfr/utils/graph.pyi +10 -0
  136. tnfr/utils/init.py +746 -0
  137. tnfr/utils/init.pyi +85 -0
  138. tnfr/utils/io.py +157 -0
  139. tnfr/utils/io.pyi +10 -0
  140. tnfr/utils/validators.py +130 -0
  141. tnfr/utils/validators.pyi +19 -0
  142. tnfr/validation/__init__.py +25 -0
  143. tnfr/validation/__init__.pyi +17 -0
  144. tnfr/validation/compatibility.py +59 -0
  145. tnfr/validation/compatibility.pyi +8 -0
  146. tnfr/validation/grammar.py +149 -0
  147. tnfr/validation/grammar.pyi +11 -0
  148. tnfr/validation/rules.py +194 -0
  149. tnfr/validation/rules.pyi +18 -0
  150. tnfr/validation/syntax.py +151 -0
  151. tnfr/validation/syntax.pyi +7 -0
  152. tnfr-6.0.0.dist-info/METADATA +135 -0
  153. tnfr-6.0.0.dist-info/RECORD +157 -0
  154. tnfr/cli.py +0 -322
  155. tnfr/config.py +0 -41
  156. tnfr/constants.py +0 -277
  157. tnfr/dynamics.py +0 -814
  158. tnfr/helpers.py +0 -264
  159. tnfr/main.py +0 -47
  160. tnfr/metrics.py +0 -597
  161. tnfr/operators.py +0 -525
  162. tnfr/program.py +0 -176
  163. tnfr/scenarios.py +0 -34
  164. tnfr/validators.py +0 -38
  165. tnfr-4.5.1.dist-info/METADATA +0 -221
  166. tnfr-4.5.1.dist-info/RECORD +0 -28
  167. {tnfr-4.5.1.dist-info → tnfr-6.0.0.dist-info}/WHEEL +0 -0
  168. {tnfr-4.5.1.dist-info → tnfr-6.0.0.dist-info}/entry_points.txt +0 -0
  169. {tnfr-4.5.1.dist-info → tnfr-6.0.0.dist-info}/licenses/LICENSE.md +0 -0
  170. {tnfr-4.5.1.dist-info → tnfr-6.0.0.dist-info}/top_level.txt +0 -0
tnfr/utils/init.py ADDED
@@ -0,0 +1,746 @@
+ """Core logging and import helpers for :mod:`tnfr`.
+
+ This module merges the functionality that historically lived in
+ ``tnfr.logging_utils`` and ``tnfr.import_utils``. The behaviour is kept
+ identical so downstream consumers can keep relying on the same APIs while
+ benefiting from a consolidated entry point under :mod:`tnfr.utils`.
+ """
+
+ from __future__ import annotations
+
+ import importlib
+ import logging
+ import threading
+ import warnings
+ import weakref
+ from collections import OrderedDict
+ from dataclasses import dataclass, field
+ from typing import Any, Callable, Hashable, Iterable, Iterator, Literal, Mapping
+
+ from ..cache import CacheManager
+
+ __all__ = (
+     "_configure_root",
+     "cached_import",
+     "warm_cached_import",
+     "LazyImportProxy",
+     "get_logger",
+     "get_numpy",
+     "get_nodenx",
+     "prune_failed_imports",
+     "WarnOnce",
+     "warn_once",
+     "IMPORT_LOG",
+     "EMIT_MAP",
+     "_warn_failure",
+     "_IMPORT_STATE",
+     "_reset_logging_state",
+     "_reset_import_state",
+     "_FAILED_IMPORT_LIMIT",
+     "_DEFAULT_CACHE_SIZE",
+ )
+
+
+ _LOGGING_CONFIGURED = False
+
+
+ def _reset_logging_state() -> None:
+     """Reset cached logging configuration state."""
+
+     global _LOGGING_CONFIGURED
+     _LOGGING_CONFIGURED = False
+
+
+ def _configure_root() -> None:
+     """Ensure the root logger has handlers and a default format."""
+
+     global _LOGGING_CONFIGURED
+     if _LOGGING_CONFIGURED:
+         return
+
+     root = logging.getLogger()
+     if not root.handlers:
+         kwargs = {"format": "%(asctime)s - %(name)s - %(levelname)s - %(message)s"}
+         if root.level == logging.NOTSET:
+             kwargs["level"] = logging.INFO
+         logging.basicConfig(**kwargs)
+
+     _LOGGING_CONFIGURED = True
+
+
+ def get_logger(name: str) -> logging.Logger:
+     """Return a module-specific logger."""
+
+     _configure_root()
+     return logging.getLogger(name)
+
+
+ class WarnOnce:
+     """Log a warning only once for each unique key.
+
+     ``WarnOnce`` tracks seen keys in a bounded :class:`set`. When ``maxsize`` is
+     reached an arbitrary key is evicted to keep memory usage stable; ordered
+     eviction is intentionally avoided to keep the implementation lightweight.
+     Instances are callable and accept either a mapping of keys to values or a
+     single key/value pair. Passing ``maxsize <= 0`` disables caching and logs on
+     every invocation.
+     """
+
+     def __init__(self, logger: logging.Logger, msg: str, *, maxsize: int = 1024) -> None:
+         self._logger = logger
+         self._msg = msg
+         self._maxsize = maxsize
+         self._seen: set[Hashable] = set()
+         self._lock = threading.Lock()
+
+     def _mark_seen(self, key: Hashable) -> bool:
+         """Return ``True`` when ``key`` has not been seen before."""
+
+         if self._maxsize <= 0:
+             # Caching disabled – always log.
+             return True
+         if key in self._seen:
+             return False
+         if len(self._seen) >= self._maxsize:
+             # ``set.pop()`` removes an arbitrary element which is acceptable for
+             # this lightweight cache.
+             self._seen.pop()
+         self._seen.add(key)
+         return True
+
+     def __call__(
+         self,
+         data: Mapping[Hashable, Any] | Hashable,
+         value: Any | None = None,
+     ) -> None:
+         """Log new keys found in ``data``.
+
+         ``data`` may be a mapping of keys to payloads or a single key. When
+         called with a single key ``value`` customises the payload passed to the
+         logging message; the key itself is used when ``value`` is omitted.
+         """
+
+         if isinstance(data, Mapping):
+             new_items: dict[Hashable, Any] = {}
+             with self._lock:
+                 for key, item_value in data.items():
+                     if self._mark_seen(key):
+                         new_items[key] = item_value
+             if new_items:
+                 self._logger.warning(self._msg, new_items)
+             return
+
+         key = data
+         payload = value if value is not None else data
+         with self._lock:
+             should_log = self._mark_seen(key)
+         if should_log:
+             self._logger.warning(self._msg, payload)
+
+     def clear(self) -> None:
+         """Reset tracked keys."""
+
+         with self._lock:
+             self._seen.clear()
+
+
+ def warn_once(
+     logger: logging.Logger,
+     msg: str,
+     *,
+     maxsize: int = 1024,
+ ) -> WarnOnce:
+     """Return a :class:`WarnOnce` logger."""
+
+     return WarnOnce(logger, msg, maxsize=maxsize)
+
+
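For orientation, a minimal usage sketch of WarnOnce / warn_once as defined above. It assumes the helpers are imported directly from tnfr.utils.init (they also appear to be re-exported under tnfr.utils per the module docstring); the keys and message are arbitrary illustrations.

    from tnfr.utils.init import get_logger, warn_once

    log = get_logger("tnfr.demo")
    warn_missing = warn_once(log, "missing attributes: %s", maxsize=256)

    # Single-key form: logged the first time only.
    warn_missing("EPI")   # emits one WARNING with payload "EPI"
    warn_missing("EPI")   # key already seen, silently skipped

    # Mapping form: only keys not seen before are logged, batched into one record.
    warn_missing({"EPI": 0.0, "VF": 1.0})   # logs {"VF": 1.0} only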
+ _FAILED_IMPORT_LIMIT = 128
+ _DEFAULT_CACHE_SIZE = 128
+
+ _SUCCESS_CACHE_NAME = "import.success"
+ _FAILURE_CACHE_NAME = "import.failure"
+
+
+ def _import_key(module_name: str, attr: str | None) -> str:
+     return module_name if attr is None else f"{module_name}.{attr}"
+
+
+ @dataclass(slots=True)
+ class ImportRegistry:
+     """Process-wide registry tracking failed imports and emitted warnings."""
+
+     limit: int = 128
+     failed: OrderedDict[str, None] = field(default_factory=OrderedDict)
+     warned: set[str] = field(default_factory=set)
+     lock: threading.Lock = field(default_factory=threading.Lock)
+
+     def _insert(self, key: str) -> None:
+         self.failed[key] = None
+         self.failed.move_to_end(key)
+         while len(self.failed) > self.limit:
+             self.failed.popitem(last=False)
+
+     def record_failure(self, key: str, *, module: str | None = None) -> None:
+         """Record ``key`` and, optionally, ``module`` as failed imports."""
+
+         with self.lock:
+             self._insert(key)
+             if module and module != key:
+                 self._insert(module)
+
+     def discard(self, key: str) -> None:
+         """Remove ``key`` from the registry and clear its warning state."""
+
+         with self.lock:
+             self.failed.pop(key, None)
+             self.warned.discard(key)
+
+     def mark_warning(self, module: str) -> bool:
+         """Mark ``module`` as warned and return ``True`` if it was new."""
+
+         with self.lock:
+             if module in self.warned:
+                 return False
+             self.warned.add(module)
+             return True
+
+     def clear(self) -> None:
+         """Remove all failure records and warning markers."""
+
+         with self.lock:
+             self.failed.clear()
+             self.warned.clear()
+
+     def __contains__(self, key: str) -> bool:  # pragma: no cover - trivial
+         with self.lock:
+             return key in self.failed
+
+
+ # Successful imports are cached so lazy proxies can resolve once and later
+ # requests return the concrete object without recreating the proxy. The cache
+ # stores weak references whenever possible so unused imports can be collected
+ # after external references disappear.
+
+
+ class _CacheEntry:
+     """Container storing either a weak or strong reference to a value."""
+
+     __slots__ = ("_kind", "_value")
+
+     def __init__(
+         self,
+         value: Any,
+         *,
+         key: str,
+         remover: Callable[[str, weakref.ReferenceType[Any]], None],
+     ) -> None:
+         try:
+             reference = weakref.ref(value, lambda ref, key=key: remover(key, ref))
+         except TypeError:
+             self._kind = "strong"
+             self._value = value
+         else:
+             self._kind = "weak"
+             self._value = reference
+
+     def get(self) -> Any | None:
+         if self._kind == "weak":
+             return self._value()
+         return self._value
+
+     def matches(self, ref: weakref.ReferenceType[Any]) -> bool:
+         return self._kind == "weak" and self._value is ref
+
+
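The comment above explains why _CacheEntry falls back to a strong reference: not every Python object can be weakly referenced. A small sketch of that behaviour follows; it pokes at a private class purely for illustration, the Plugin class is made up, and the immediate collection after del relies on CPython's reference counting.

    from tnfr.utils.init import _CacheEntry

    class Plugin:
        """Arbitrary example class; its instances support weak references."""

    plugin = Plugin()
    entry = _CacheEntry(plugin, key="demo.plugin", remover=lambda key, ref: None)
    assert entry.get() is plugin   # weak entry, target still alive
    del plugin
    assert entry.get() is None     # collected once the last strong reference is gone (CPython)

    # Built-in ints cannot be weakly referenced, so the entry keeps a strong reference.
    entry = _CacheEntry(42, key="demo.int", remover=lambda key, ref: None)
    assert entry.get() == 42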
+ _IMPORT_CACHE_MANAGER = CacheManager(default_capacity=_DEFAULT_CACHE_SIZE)
+
+
+ def _success_cache_factory() -> OrderedDict[str, _CacheEntry]:
+     return OrderedDict()
+
+
+ def _failure_cache_factory() -> OrderedDict[str, Exception]:
+     return OrderedDict()
+
+
+ _IMPORT_CACHE_MANAGER.register(_SUCCESS_CACHE_NAME, _success_cache_factory)
+ _IMPORT_CACHE_MANAGER.register(_FAILURE_CACHE_NAME, _failure_cache_factory)
+
+
+ def _remove_success_entry(key: str, ref: weakref.ReferenceType[Any]) -> None:
+
+     def _cleanup(cache: OrderedDict[str, _CacheEntry]) -> OrderedDict[str, _CacheEntry]:
+         entry = cache.get(key)
+         if entry is not None and entry.matches(ref):
+             cache.pop(key, None)
+             _IMPORT_CACHE_MANAGER.increment_eviction(_SUCCESS_CACHE_NAME)
+         return cache
+
+     _IMPORT_CACHE_MANAGER.update(_SUCCESS_CACHE_NAME, _cleanup)
+
+
+ def _trim_cache(name: str, cache: OrderedDict[str, Any]) -> None:
+     capacity = _IMPORT_CACHE_MANAGER.get_capacity(name)
+     if capacity is None:
+         return
+     while len(cache) > capacity:
+         cache.popitem(last=False)
+         _IMPORT_CACHE_MANAGER.increment_eviction(name)
+
+
+ def _get_success(key: str) -> Any | None:
+     result: Any | None = None
+     hit = False
+
+     with _IMPORT_CACHE_MANAGER.timer(_SUCCESS_CACHE_NAME):
+
+         def _lookup(cache: OrderedDict[str, _CacheEntry]) -> OrderedDict[str, _CacheEntry]:
+             nonlocal result, hit
+             entry = cache.get(key)
+             if entry is None:
+                 return cache
+             value = entry.get()
+             if value is None:
+                 cache.pop(key, None)
+                 _IMPORT_CACHE_MANAGER.increment_eviction(_SUCCESS_CACHE_NAME)
+                 return cache
+             cache.move_to_end(key)
+             result = value
+             hit = True
+             return cache
+
+         _IMPORT_CACHE_MANAGER.update(_SUCCESS_CACHE_NAME, _lookup)
+         if hit:
+             _IMPORT_CACHE_MANAGER.increment_hit(_SUCCESS_CACHE_NAME)
+             return result
+         _IMPORT_CACHE_MANAGER.increment_miss(_SUCCESS_CACHE_NAME)
+         return None
+
+
+ def _store_success(key: str, value: Any) -> None:
+     entry = _CacheEntry(value, key=key, remover=_remove_success_entry)
+
+     def _store(cache: OrderedDict[str, _CacheEntry]) -> OrderedDict[str, _CacheEntry]:
+         cache[key] = entry
+         cache.move_to_end(key)
+         _trim_cache(_SUCCESS_CACHE_NAME, cache)
+         return cache
+
+     def _purge_failure(cache: OrderedDict[str, Exception]) -> OrderedDict[str, Exception]:
+         if cache.pop(key, None) is not None:
+             _IMPORT_CACHE_MANAGER.increment_eviction(_FAILURE_CACHE_NAME)
+         return cache
+
+     _IMPORT_CACHE_MANAGER.update(_SUCCESS_CACHE_NAME, _store)
+     _IMPORT_CACHE_MANAGER.update(_FAILURE_CACHE_NAME, _purge_failure)
+
+
+ def _get_failure(key: str) -> Exception | None:
+     result: Exception | None = None
+     hit = False
+
+     with _IMPORT_CACHE_MANAGER.timer(_FAILURE_CACHE_NAME):
+
+         def _lookup(cache: OrderedDict[str, Exception]) -> OrderedDict[str, Exception]:
+             nonlocal result, hit
+             exc = cache.get(key)
+             if exc is None:
+                 return cache
+             cache.move_to_end(key)
+             result = exc
+             hit = True
+             return cache
+
+         _IMPORT_CACHE_MANAGER.update(_FAILURE_CACHE_NAME, _lookup)
+         if hit:
+             _IMPORT_CACHE_MANAGER.increment_hit(_FAILURE_CACHE_NAME)
+             return result
+         _IMPORT_CACHE_MANAGER.increment_miss(_FAILURE_CACHE_NAME)
+         return None
+
+
+ def _store_failure(key: str, exc: Exception) -> None:
+
+     def _store(cache: OrderedDict[str, Exception]) -> OrderedDict[str, Exception]:
+         cache[key] = exc
+         cache.move_to_end(key)
+         _trim_cache(_FAILURE_CACHE_NAME, cache)
+         return cache
+
+     def _purge_success(cache: OrderedDict[str, _CacheEntry]) -> OrderedDict[str, _CacheEntry]:
+         if cache.pop(key, None) is not None:
+             _IMPORT_CACHE_MANAGER.increment_eviction(_SUCCESS_CACHE_NAME)
+         return cache
+
+     _IMPORT_CACHE_MANAGER.update(_FAILURE_CACHE_NAME, _store)
+     _IMPORT_CACHE_MANAGER.update(_SUCCESS_CACHE_NAME, _purge_success)
+
+
+ def _clear_import_cache() -> None:
+     _IMPORT_CACHE_MANAGER.clear()
+
+
+ _IMPORT_STATE = ImportRegistry()
+ # Public alias to ease direct introspection in tests and diagnostics.
+ IMPORT_LOG = _IMPORT_STATE
+
+
+ def _reset_import_state() -> None:
+     """Reset cached import tracking structures."""
+
+     global _IMPORT_STATE, IMPORT_LOG
+     _IMPORT_STATE = ImportRegistry()
+     IMPORT_LOG = _IMPORT_STATE
+     _clear_import_cache()
+
+
+ def _import_cached(module_name: str, attr: str | None) -> tuple[bool, Any]:
+     """Import ``module_name`` (and optional ``attr``) capturing failures."""
+
+     key = _import_key(module_name, attr)
+     cached_value = _get_success(key)
+     if cached_value is not None:
+         return True, cached_value
+
+     cached_failure = _get_failure(key)
+     if cached_failure is not None:
+         return False, cached_failure
+
+     try:
+         module = importlib.import_module(module_name)
+         obj = getattr(module, attr) if attr else module
+     except (ImportError, AttributeError) as exc:
+         _store_failure(key, exc)
+         return False, exc
+
+     _store_success(key, obj)
+     return True, obj
+
+
+ logger = get_logger(__name__)
+
+
+ def _format_failure_message(module: str, attr: str | None, err: Exception) -> str:
+     """Return a standardised failure message."""
+
+     return (
+         f"Failed to import module '{module}': {err}"
+         if isinstance(err, ImportError)
+         else f"Module '{module}' has no attribute '{attr}': {err}"
+     )
+
+
+ EMIT_MAP: dict[str, Callable[[str], None]] = {
+     "warn": lambda msg: _emit(msg, "warn"),
+     "log": lambda msg: _emit(msg, "log"),
+     "both": lambda msg: _emit(msg, "both"),
+ }
+
+
+ def _emit(message: str, mode: Literal["warn", "log", "both"]) -> None:
+     """Emit ``message`` via :mod:`warnings`, logger or both."""
+
+     if mode in ("warn", "both"):
+         warnings.warn(message, RuntimeWarning, stacklevel=2)
+     if mode in ("log", "both"):
+         logger.warning(message)
+
+
+ def _warn_failure(
+     module: str,
+     attr: str | None,
+     err: Exception,
+     *,
+     emit: Literal["warn", "log", "both"] = "warn",
+ ) -> None:
+     """Emit a warning about a failed import."""
+
+     msg = _format_failure_message(module, attr, err)
+     if _IMPORT_STATE.mark_warning(module):
+         EMIT_MAP[emit](msg)
+     else:
+         logger.debug(msg)
+
+
+ class LazyImportProxy:
+     """Descriptor that defers imports until first use."""
+
+     __slots__ = (
+         "_module",
+         "_attr",
+         "_emit",
+         "_fallback",
+         "_target_ref",
+         "_strong_target",
+         "_lock",
+         "_key",
+         "__weakref__",
+     )
+
+     _UNRESOLVED = object()
+
+     def __init__(
+         self,
+         module_name: str,
+         attr: str | None,
+         emit: Literal["warn", "log", "both"],
+         fallback: Any | None,
+     ) -> None:
+         self._module = module_name
+         self._attr = attr
+         self._emit = emit
+         self._fallback = fallback
+         self._target_ref: weakref.ReferenceType[Any] | None = None
+         self._strong_target: Any = self._UNRESOLVED
+         self._lock = threading.Lock()
+         self._key = _import_key(module_name, attr)
+
+     def _store_target(self, target: Any) -> None:
+         try:
+             self_ref = weakref.ref(self)
+
+             def _cleanup(ref: weakref.ReferenceType[Any]) -> None:
+                 proxy = self_ref()
+                 if proxy is None:
+                     return
+                 with proxy._lock:
+                     if proxy._target_ref is ref:
+                         proxy._target_ref = None
+
+             self._target_ref = weakref.ref(target, _cleanup)
+         except TypeError:
+             self._strong_target = target
+             self._target_ref = None
+         else:
+             self._strong_target = self._UNRESOLVED
+
+     def _resolved_target(self) -> Any:
+         if self._strong_target is not self._UNRESOLVED:
+             return self._strong_target
+         if self._target_ref is None:
+             return self._UNRESOLVED
+         target = self._target_ref()
+         if target is None:
+             self._target_ref = None
+             return self._UNRESOLVED
+         return target
+
+     def _resolve(self) -> Any:
+         target = self._resolved_target()
+         if target is not self._UNRESOLVED:
+             return target
+
+         with self._lock:
+             target = self._resolved_target()
+             if target is self._UNRESOLVED:
+                 target = _resolve_import(
+                     self._module,
+                     self._attr,
+                     self._emit,
+                     self._fallback,
+                 )
+                 self._store_target(target)
+             return target
+
+     def resolve(self) -> Any:
+         """Eagerly resolve and return the proxied object."""
+
+         return self._resolve()
+
+     def __getattr__(self, item: str) -> Any:
+         return getattr(self._resolve(), item)
+
+     def __call__(self, *args: Any, **kwargs: Any) -> Any:
+         return self._resolve()(*args, **kwargs)
+
+     def __bool__(self) -> bool:
+         return bool(self._resolve())
+
+     def __repr__(self) -> str:  # pragma: no cover - representation helper
+         target = self._resolved_target()
+         if target is self._UNRESOLVED:
+             return f"<LazyImportProxy pending={self._key!r}>"
+         return repr(target)
+
+     def __str__(self) -> str:  # pragma: no cover - representation helper
+         return str(self._resolve())
+
+     def __iter__(self) -> Iterator[Any]:  # pragma: no cover - passthrough helper
+         return iter(self._resolve())
+
+
+ def _resolve_import(
+     module_name: str,
+     attr: str | None,
+     emit: Literal["warn", "log", "both"],
+     fallback: Any | None,
+ ) -> Any | None:
+     key = _import_key(module_name, attr)
+     success, result = _import_cached(module_name, attr)
+     if success:
+         _IMPORT_STATE.discard(key)
+         if attr is not None:
+             _IMPORT_STATE.discard(module_name)
+         return result
+
+     exc: Exception = result
+     include_module = isinstance(exc, ImportError)
+     _warn_failure(module_name, attr, exc, emit=emit)
+     _IMPORT_STATE.record_failure(key, module=module_name if include_module else None)
+     return fallback
+
+
+ def cached_import(
+     module_name: str,
+     attr: str | None = None,
+     *,
+     fallback: Any | None = None,
+     emit: Literal["warn", "log", "both"] = "warn",
+     lazy: bool = False,
+ ) -> Any | None:
+     """Import ``module_name`` (and optional ``attr``) with caching and fallback.
+
+     When ``lazy`` is ``True`` the import is deferred until the returned proxy is
+     first used. The proxy integrates with the shared cache so subsequent calls
+     return the resolved object directly.
+     """
+
+     key = _import_key(module_name, attr)
+
+     if lazy:
+         cached_obj = _get_success(key)
+         if cached_obj is not None:
+             return cached_obj
+         return LazyImportProxy(module_name, attr, emit, fallback)
+
+     return _resolve_import(module_name, attr, emit, fallback)
+
+
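A usage sketch for cached_import as documented above. The third-party modules named here (yaml, numpy) are arbitrary optional dependencies chosen for illustration, and the lazy call assumes NumPy is actually installed when the proxy is first used.

    from tnfr.utils.init import cached_import

    # Eager import with a fallback; failures are routed to the logger only.
    yaml = cached_import("yaml", fallback=None, emit="log")
    if yaml is None:
        print("PyYAML not installed; using a JSON-based fallback instead")

    # Attribute form caches the object under the key "numpy.array".
    array = cached_import("numpy", "array")

    # Lazy form returns a LazyImportProxy; the real import happens on first use.
    np_lazy = cached_import("numpy", lazy=True)
    matrix = np_lazy.eye(3)  # attribute access triggers the deferred import here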
+ _ModuleSpec = str | tuple[str, str | None]
+
+
+ def _normalise_warm_specs(
+     module: _ModuleSpec | Iterable[_ModuleSpec],
+     extra: tuple[_ModuleSpec, ...],
+     attr: str | None,
+ ) -> list[tuple[str, str | None]]:
+     if attr is not None:
+         if extra:
+             raise ValueError("'attr' can only be combined with a single module name")
+         if not isinstance(module, str):
+             raise TypeError("'attr' requires the first argument to be a module name string")
+         return [(module, attr)]
+
+     specs: list[_ModuleSpec]
+     if extra:
+         specs = [module, *extra]
+     elif isinstance(module, tuple) and len(module) == 2:
+         specs = [module]
+     elif isinstance(module, str):
+         specs = [module]
+     else:
+         if isinstance(module, Iterable):
+             specs = list(module)
+             if not specs:
+                 raise ValueError("At least one module specification is required")
+         else:
+             raise TypeError("Unsupported module specification for warm_cached_import")
+
+     normalised: list[tuple[str, str | None]] = []
+     for spec in specs:
+         if isinstance(spec, str):
+             normalised.append((spec, None))
+             continue
+         if isinstance(spec, tuple) and len(spec) == 2:
+             module_name, module_attr = spec
+             if not isinstance(module_name, str) or (
+                 module_attr is not None and not isinstance(module_attr, str)
+             ):
+                 raise TypeError("Invalid module specification for warm_cached_import")
+             normalised.append((module_name, module_attr))
+             continue
+         raise TypeError("Module specifications must be strings or (module, attr) tuples")
+
+     return normalised
+
+
+ def warm_cached_import(
+     module: _ModuleSpec | Iterable[_ModuleSpec],
+     *extra: _ModuleSpec,
+     attr: str | None = None,
+     fallback: Any | None = None,
+     emit: Literal["warn", "log", "both"] = "warn",
+     lazy: bool = False,
+     resolve: bool = False,
+ ) -> Any | dict[str, Any | None]:
+     """Pre-populate the import cache for the provided module specifications.
+
+     When ``lazy`` is ``True`` the cached objects are returned as proxies by
+     default. Setting ``resolve`` forces those proxies to resolve immediately
+     during the warm-up phase while still sharing the same cache entries.
+     """
+
+     if resolve and not lazy:
+         raise ValueError("'resolve' can only be used when 'lazy' is True")
+
+     specs = _normalise_warm_specs(module, extra, attr)
+     results: dict[str, Any | None] = {}
+     for module_name, module_attr in specs:
+         key = _import_key(module_name, module_attr)
+         results[key] = cached_import(
+             module_name,
+             module_attr,
+             fallback=fallback,
+             emit=emit,
+             lazy=lazy,
+         )
+         if resolve and isinstance(results[key], LazyImportProxy):
+             results[key] = results[key].resolve()
+
+     if len(results) == 1:
+         return next(iter(results.values()))
+     return results
+
+
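A sketch of warming several imports at once with warm_cached_import; the module names are arbitrary stand-ins, and the last call assumes the named packages are installed so the resolved objects are not the fallback.

    from tnfr.utils.init import warm_cached_import

    # A single spec returns the imported object itself rather than a dict.
    json_mod = warm_cached_import("json")

    # Multiple specs return a dict keyed by "module" or "module.attr".
    warmed = warm_cached_import("math", ("collections", "OrderedDict"))
    ordered_dict = warmed["collections.OrderedDict"]

    # Lazy warm-up registers proxies, and resolve=True resolves them immediately.
    resolved = warm_cached_import("numpy", "networkx", lazy=True, resolve=True)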
+ def _clear_default_cache() -> None:
+     global _NP_MISSING_LOGGED
+
+     _clear_import_cache()
+     _NP_MISSING_LOGGED = False
+
+
+ cached_import.cache_clear = _clear_default_cache  # type: ignore[attr-defined]
+
+
+ _NP_MISSING_LOGGED = False
+
+
+ def get_numpy() -> Any | None:
+     """Return the cached :mod:`numpy` module when available."""
+
+     global _NP_MISSING_LOGGED
+
+     np = cached_import("numpy")
+     if np is None:
+         if not _NP_MISSING_LOGGED:
+             logger.debug("Failed to import numpy; continuing in non-vectorised mode")
+             _NP_MISSING_LOGGED = True
+         return None
+
+     if _NP_MISSING_LOGGED:
+         _NP_MISSING_LOGGED = False
+     return np
+
+
+ def get_nodenx() -> type | None:
+     """Return :class:`tnfr.node.NodeNX` using import caching."""
+
+     return cached_import("tnfr.node", "NodeNX")
+
+
+ def prune_failed_imports() -> None:
+     """Clear the registry of recorded import failures and warnings."""
+
+     _IMPORT_STATE.clear()
+     _IMPORT_CACHE_MANAGER.clear(_FAILURE_CACHE_NAME)
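Finally, a sketch of the optional-dependency pattern that get_numpy and prune_failed_imports support. The branch taken depends on whether NumPy is installed; the numbers are arbitrary sample data.

    from tnfr.utils.init import get_numpy, prune_failed_imports

    np = get_numpy()
    if np is not None:
        values = np.asarray([0.1, 0.2, 0.3])
        mean = float(values.mean())        # vectorised path
    else:
        data = [0.1, 0.2, 0.3]
        mean = sum(data) / len(data)       # pure-Python fallback

    # After installing a previously missing dependency, drop the recorded
    # failures so the next cached_import/get_numpy call retries the import.
    prune_failed_imports()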