tnfr 4.5.2__py3-none-any.whl → 8.5.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of tnfr might be problematic. Click here for more details.
- tnfr/__init__.py +334 -50
- tnfr/__init__.pyi +33 -0
- tnfr/_compat.py +10 -0
- tnfr/_generated_version.py +34 -0
- tnfr/_version.py +49 -0
- tnfr/_version.pyi +7 -0
- tnfr/alias.py +214 -37
- tnfr/alias.pyi +108 -0
- tnfr/backends/__init__.py +354 -0
- tnfr/backends/jax_backend.py +173 -0
- tnfr/backends/numpy_backend.py +238 -0
- tnfr/backends/optimized_numpy.py +420 -0
- tnfr/backends/torch_backend.py +408 -0
- tnfr/cache.py +149 -556
- tnfr/cache.pyi +13 -0
- tnfr/cli/__init__.py +51 -16
- tnfr/cli/__init__.pyi +26 -0
- tnfr/cli/arguments.py +344 -32
- tnfr/cli/arguments.pyi +29 -0
- tnfr/cli/execution.py +676 -50
- tnfr/cli/execution.pyi +70 -0
- tnfr/cli/interactive_validator.py +614 -0
- tnfr/cli/utils.py +18 -3
- tnfr/cli/utils.pyi +7 -0
- tnfr/cli/validate.py +236 -0
- tnfr/compat/__init__.py +85 -0
- tnfr/compat/dataclass.py +136 -0
- tnfr/compat/jsonschema_stub.py +61 -0
- tnfr/compat/matplotlib_stub.py +73 -0
- tnfr/compat/numpy_stub.py +155 -0
- tnfr/config/__init__.py +224 -0
- tnfr/config/__init__.pyi +10 -0
- tnfr/{constants_glyphs.py → config/constants.py} +26 -20
- tnfr/config/constants.pyi +12 -0
- tnfr/config/defaults.py +54 -0
- tnfr/{constants/core.py → config/defaults_core.py} +59 -6
- tnfr/config/defaults_init.py +33 -0
- tnfr/config/defaults_metric.py +104 -0
- tnfr/config/feature_flags.py +81 -0
- tnfr/config/feature_flags.pyi +16 -0
- tnfr/config/glyph_constants.py +31 -0
- tnfr/config/init.py +77 -0
- tnfr/config/init.pyi +8 -0
- tnfr/config/operator_names.py +254 -0
- tnfr/config/operator_names.pyi +36 -0
- tnfr/config/physics_derivation.py +354 -0
- tnfr/config/presets.py +83 -0
- tnfr/config/presets.pyi +7 -0
- tnfr/config/security.py +927 -0
- tnfr/config/thresholds.py +114 -0
- tnfr/config/tnfr_config.py +498 -0
- tnfr/constants/__init__.py +51 -133
- tnfr/constants/__init__.pyi +92 -0
- tnfr/constants/aliases.py +33 -0
- tnfr/constants/aliases.pyi +27 -0
- tnfr/constants/init.py +3 -1
- tnfr/constants/init.pyi +12 -0
- tnfr/constants/metric.py +9 -15
- tnfr/constants/metric.pyi +19 -0
- tnfr/core/__init__.py +33 -0
- tnfr/core/container.py +226 -0
- tnfr/core/default_implementations.py +329 -0
- tnfr/core/interfaces.py +279 -0
- tnfr/dynamics/__init__.py +213 -633
- tnfr/dynamics/__init__.pyi +83 -0
- tnfr/dynamics/adaptation.py +267 -0
- tnfr/dynamics/adaptation.pyi +7 -0
- tnfr/dynamics/adaptive_sequences.py +189 -0
- tnfr/dynamics/adaptive_sequences.pyi +14 -0
- tnfr/dynamics/aliases.py +23 -0
- tnfr/dynamics/aliases.pyi +19 -0
- tnfr/dynamics/bifurcation.py +232 -0
- tnfr/dynamics/canonical.py +229 -0
- tnfr/dynamics/canonical.pyi +48 -0
- tnfr/dynamics/coordination.py +385 -0
- tnfr/dynamics/coordination.pyi +25 -0
- tnfr/dynamics/dnfr.py +2699 -398
- tnfr/dynamics/dnfr.pyi +26 -0
- tnfr/dynamics/dynamic_limits.py +225 -0
- tnfr/dynamics/feedback.py +252 -0
- tnfr/dynamics/feedback.pyi +24 -0
- tnfr/dynamics/fused_dnfr.py +454 -0
- tnfr/dynamics/homeostasis.py +157 -0
- tnfr/dynamics/homeostasis.pyi +14 -0
- tnfr/dynamics/integrators.py +496 -102
- tnfr/dynamics/integrators.pyi +36 -0
- tnfr/dynamics/learning.py +310 -0
- tnfr/dynamics/learning.pyi +33 -0
- tnfr/dynamics/metabolism.py +254 -0
- tnfr/dynamics/nbody.py +796 -0
- tnfr/dynamics/nbody_tnfr.py +783 -0
- tnfr/dynamics/propagation.py +326 -0
- tnfr/dynamics/runtime.py +908 -0
- tnfr/dynamics/runtime.pyi +77 -0
- tnfr/dynamics/sampling.py +10 -5
- tnfr/dynamics/sampling.pyi +7 -0
- tnfr/dynamics/selectors.py +711 -0
- tnfr/dynamics/selectors.pyi +85 -0
- tnfr/dynamics/structural_clip.py +207 -0
- tnfr/errors/__init__.py +37 -0
- tnfr/errors/contextual.py +492 -0
- tnfr/execution.py +77 -55
- tnfr/execution.pyi +45 -0
- tnfr/extensions/__init__.py +205 -0
- tnfr/extensions/__init__.pyi +18 -0
- tnfr/extensions/base.py +173 -0
- tnfr/extensions/base.pyi +35 -0
- tnfr/extensions/business/__init__.py +71 -0
- tnfr/extensions/business/__init__.pyi +11 -0
- tnfr/extensions/business/cookbook.py +88 -0
- tnfr/extensions/business/cookbook.pyi +8 -0
- tnfr/extensions/business/health_analyzers.py +202 -0
- tnfr/extensions/business/health_analyzers.pyi +9 -0
- tnfr/extensions/business/patterns.py +183 -0
- tnfr/extensions/business/patterns.pyi +8 -0
- tnfr/extensions/medical/__init__.py +73 -0
- tnfr/extensions/medical/__init__.pyi +11 -0
- tnfr/extensions/medical/cookbook.py +88 -0
- tnfr/extensions/medical/cookbook.pyi +8 -0
- tnfr/extensions/medical/health_analyzers.py +181 -0
- tnfr/extensions/medical/health_analyzers.pyi +9 -0
- tnfr/extensions/medical/patterns.py +163 -0
- tnfr/extensions/medical/patterns.pyi +8 -0
- tnfr/flatten.py +29 -50
- tnfr/flatten.pyi +21 -0
- tnfr/gamma.py +66 -53
- tnfr/gamma.pyi +36 -0
- tnfr/glyph_history.py +144 -57
- tnfr/glyph_history.pyi +35 -0
- tnfr/glyph_runtime.py +19 -0
- tnfr/glyph_runtime.pyi +8 -0
- tnfr/immutable.py +70 -30
- tnfr/immutable.pyi +36 -0
- tnfr/initialization.py +22 -16
- tnfr/initialization.pyi +65 -0
- tnfr/io.py +5 -241
- tnfr/io.pyi +13 -0
- tnfr/locking.pyi +7 -0
- tnfr/mathematics/__init__.py +79 -0
- tnfr/mathematics/backend.py +453 -0
- tnfr/mathematics/backend.pyi +99 -0
- tnfr/mathematics/dynamics.py +408 -0
- tnfr/mathematics/dynamics.pyi +90 -0
- tnfr/mathematics/epi.py +391 -0
- tnfr/mathematics/epi.pyi +65 -0
- tnfr/mathematics/generators.py +242 -0
- tnfr/mathematics/generators.pyi +29 -0
- tnfr/mathematics/metrics.py +119 -0
- tnfr/mathematics/metrics.pyi +16 -0
- tnfr/mathematics/operators.py +239 -0
- tnfr/mathematics/operators.pyi +59 -0
- tnfr/mathematics/operators_factory.py +124 -0
- tnfr/mathematics/operators_factory.pyi +11 -0
- tnfr/mathematics/projection.py +87 -0
- tnfr/mathematics/projection.pyi +33 -0
- tnfr/mathematics/runtime.py +182 -0
- tnfr/mathematics/runtime.pyi +64 -0
- tnfr/mathematics/spaces.py +256 -0
- tnfr/mathematics/spaces.pyi +83 -0
- tnfr/mathematics/transforms.py +305 -0
- tnfr/mathematics/transforms.pyi +62 -0
- tnfr/metrics/__init__.py +47 -9
- tnfr/metrics/__init__.pyi +20 -0
- tnfr/metrics/buffer_cache.py +163 -0
- tnfr/metrics/buffer_cache.pyi +24 -0
- tnfr/metrics/cache_utils.py +214 -0
- tnfr/metrics/coherence.py +1510 -330
- tnfr/metrics/coherence.pyi +129 -0
- tnfr/metrics/common.py +23 -16
- tnfr/metrics/common.pyi +35 -0
- tnfr/metrics/core.py +251 -36
- tnfr/metrics/core.pyi +13 -0
- tnfr/metrics/diagnosis.py +709 -110
- tnfr/metrics/diagnosis.pyi +86 -0
- tnfr/metrics/emergence.py +245 -0
- tnfr/metrics/export.py +60 -18
- tnfr/metrics/export.pyi +7 -0
- tnfr/metrics/glyph_timing.py +233 -43
- tnfr/metrics/glyph_timing.pyi +81 -0
- tnfr/metrics/learning_metrics.py +280 -0
- tnfr/metrics/learning_metrics.pyi +21 -0
- tnfr/metrics/phase_coherence.py +351 -0
- tnfr/metrics/phase_compatibility.py +349 -0
- tnfr/metrics/reporting.py +63 -28
- tnfr/metrics/reporting.pyi +25 -0
- tnfr/metrics/sense_index.py +1126 -43
- tnfr/metrics/sense_index.pyi +9 -0
- tnfr/metrics/trig.py +215 -23
- tnfr/metrics/trig.pyi +13 -0
- tnfr/metrics/trig_cache.py +148 -24
- tnfr/metrics/trig_cache.pyi +10 -0
- tnfr/multiscale/__init__.py +32 -0
- tnfr/multiscale/hierarchical.py +517 -0
- tnfr/node.py +646 -140
- tnfr/node.pyi +139 -0
- tnfr/observers.py +160 -45
- tnfr/observers.pyi +31 -0
- tnfr/ontosim.py +23 -19
- tnfr/ontosim.pyi +28 -0
- tnfr/operators/__init__.py +1358 -106
- tnfr/operators/__init__.pyi +31 -0
- tnfr/operators/algebra.py +277 -0
- tnfr/operators/canonical_patterns.py +420 -0
- tnfr/operators/cascade.py +267 -0
- tnfr/operators/cycle_detection.py +358 -0
- tnfr/operators/definitions.py +4108 -0
- tnfr/operators/definitions.pyi +78 -0
- tnfr/operators/grammar.py +1164 -0
- tnfr/operators/grammar.pyi +140 -0
- tnfr/operators/hamiltonian.py +710 -0
- tnfr/operators/health_analyzer.py +809 -0
- tnfr/operators/jitter.py +107 -38
- tnfr/operators/jitter.pyi +11 -0
- tnfr/operators/lifecycle.py +314 -0
- tnfr/operators/metabolism.py +618 -0
- tnfr/operators/metrics.py +2138 -0
- tnfr/operators/network_analysis/__init__.py +27 -0
- tnfr/operators/network_analysis/source_detection.py +186 -0
- tnfr/operators/nodal_equation.py +395 -0
- tnfr/operators/pattern_detection.py +660 -0
- tnfr/operators/patterns.py +669 -0
- tnfr/operators/postconditions/__init__.py +38 -0
- tnfr/operators/postconditions/mutation.py +236 -0
- tnfr/operators/preconditions/__init__.py +1226 -0
- tnfr/operators/preconditions/coherence.py +305 -0
- tnfr/operators/preconditions/dissonance.py +236 -0
- tnfr/operators/preconditions/emission.py +128 -0
- tnfr/operators/preconditions/mutation.py +580 -0
- tnfr/operators/preconditions/reception.py +125 -0
- tnfr/operators/preconditions/resonance.py +364 -0
- tnfr/operators/registry.py +74 -0
- tnfr/operators/registry.pyi +9 -0
- tnfr/operators/remesh.py +1415 -91
- tnfr/operators/remesh.pyi +26 -0
- tnfr/operators/structural_units.py +268 -0
- tnfr/operators/unified_grammar.py +105 -0
- tnfr/parallel/__init__.py +54 -0
- tnfr/parallel/auto_scaler.py +234 -0
- tnfr/parallel/distributed.py +384 -0
- tnfr/parallel/engine.py +238 -0
- tnfr/parallel/gpu_engine.py +420 -0
- tnfr/parallel/monitoring.py +248 -0
- tnfr/parallel/partitioner.py +459 -0
- tnfr/py.typed +0 -0
- tnfr/recipes/__init__.py +22 -0
- tnfr/recipes/cookbook.py +743 -0
- tnfr/rng.py +75 -151
- tnfr/rng.pyi +26 -0
- tnfr/schemas/__init__.py +8 -0
- tnfr/schemas/grammar.json +94 -0
- tnfr/sdk/__init__.py +107 -0
- tnfr/sdk/__init__.pyi +19 -0
- tnfr/sdk/adaptive_system.py +173 -0
- tnfr/sdk/adaptive_system.pyi +21 -0
- tnfr/sdk/builders.py +370 -0
- tnfr/sdk/builders.pyi +51 -0
- tnfr/sdk/fluent.py +1121 -0
- tnfr/sdk/fluent.pyi +74 -0
- tnfr/sdk/templates.py +342 -0
- tnfr/sdk/templates.pyi +41 -0
- tnfr/sdk/utils.py +341 -0
- tnfr/secure_config.py +46 -0
- tnfr/security/__init__.py +70 -0
- tnfr/security/database.py +514 -0
- tnfr/security/subprocess.py +503 -0
- tnfr/security/validation.py +290 -0
- tnfr/selector.py +59 -22
- tnfr/selector.pyi +19 -0
- tnfr/sense.py +92 -67
- tnfr/sense.pyi +23 -0
- tnfr/services/__init__.py +17 -0
- tnfr/services/orchestrator.py +325 -0
- tnfr/sparse/__init__.py +39 -0
- tnfr/sparse/representations.py +492 -0
- tnfr/structural.py +639 -263
- tnfr/structural.pyi +83 -0
- tnfr/telemetry/__init__.py +35 -0
- tnfr/telemetry/cache_metrics.py +226 -0
- tnfr/telemetry/cache_metrics.pyi +64 -0
- tnfr/telemetry/nu_f.py +422 -0
- tnfr/telemetry/nu_f.pyi +108 -0
- tnfr/telemetry/verbosity.py +36 -0
- tnfr/telemetry/verbosity.pyi +15 -0
- tnfr/tokens.py +2 -4
- tnfr/tokens.pyi +36 -0
- tnfr/tools/__init__.py +20 -0
- tnfr/tools/domain_templates.py +478 -0
- tnfr/tools/sequence_generator.py +846 -0
- tnfr/topology/__init__.py +13 -0
- tnfr/topology/asymmetry.py +151 -0
- tnfr/trace.py +300 -126
- tnfr/trace.pyi +42 -0
- tnfr/tutorials/__init__.py +38 -0
- tnfr/tutorials/autonomous_evolution.py +285 -0
- tnfr/tutorials/interactive.py +1576 -0
- tnfr/tutorials/structural_metabolism.py +238 -0
- tnfr/types.py +743 -12
- tnfr/types.pyi +357 -0
- tnfr/units.py +68 -0
- tnfr/units.pyi +13 -0
- tnfr/utils/__init__.py +282 -0
- tnfr/utils/__init__.pyi +215 -0
- tnfr/utils/cache.py +4223 -0
- tnfr/utils/cache.pyi +470 -0
- tnfr/{callback_utils.py → utils/callbacks.py} +26 -39
- tnfr/utils/callbacks.pyi +49 -0
- tnfr/utils/chunks.py +108 -0
- tnfr/utils/chunks.pyi +22 -0
- tnfr/utils/data.py +428 -0
- tnfr/utils/data.pyi +74 -0
- tnfr/utils/graph.py +85 -0
- tnfr/utils/graph.pyi +10 -0
- tnfr/utils/init.py +821 -0
- tnfr/utils/init.pyi +80 -0
- tnfr/utils/io.py +559 -0
- tnfr/utils/io.pyi +66 -0
- tnfr/{helpers → utils}/numeric.py +51 -24
- tnfr/utils/numeric.pyi +21 -0
- tnfr/validation/__init__.py +257 -0
- tnfr/validation/__init__.pyi +85 -0
- tnfr/validation/compatibility.py +460 -0
- tnfr/validation/compatibility.pyi +6 -0
- tnfr/validation/config.py +73 -0
- tnfr/validation/graph.py +139 -0
- tnfr/validation/graph.pyi +18 -0
- tnfr/validation/input_validation.py +755 -0
- tnfr/validation/invariants.py +712 -0
- tnfr/validation/rules.py +253 -0
- tnfr/validation/rules.pyi +44 -0
- tnfr/validation/runtime.py +279 -0
- tnfr/validation/runtime.pyi +28 -0
- tnfr/validation/sequence_validator.py +162 -0
- tnfr/validation/soft_filters.py +170 -0
- tnfr/validation/soft_filters.pyi +32 -0
- tnfr/validation/spectral.py +164 -0
- tnfr/validation/spectral.pyi +42 -0
- tnfr/validation/validator.py +1266 -0
- tnfr/validation/window.py +39 -0
- tnfr/validation/window.pyi +1 -0
- tnfr/visualization/__init__.py +98 -0
- tnfr/visualization/cascade_viz.py +256 -0
- tnfr/visualization/hierarchy.py +284 -0
- tnfr/visualization/sequence_plotter.py +784 -0
- tnfr/viz/__init__.py +60 -0
- tnfr/viz/matplotlib.py +278 -0
- tnfr/viz/matplotlib.pyi +35 -0
- tnfr-8.5.0.dist-info/METADATA +573 -0
- tnfr-8.5.0.dist-info/RECORD +353 -0
- {tnfr-4.5.2.dist-info → tnfr-8.5.0.dist-info}/entry_points.txt +1 -0
- {tnfr-4.5.2.dist-info → tnfr-8.5.0.dist-info}/licenses/LICENSE.md +1 -1
- tnfr/collections_utils.py +0 -300
- tnfr/config.py +0 -32
- tnfr/grammar.py +0 -344
- tnfr/graph_utils.py +0 -84
- tnfr/helpers/__init__.py +0 -71
- tnfr/import_utils.py +0 -228
- tnfr/json_utils.py +0 -162
- tnfr/logging_utils.py +0 -116
- tnfr/presets.py +0 -60
- tnfr/validators.py +0 -84
- tnfr/value_utils.py +0 -59
- tnfr-4.5.2.dist-info/METADATA +0 -379
- tnfr-4.5.2.dist-info/RECORD +0 -67
- {tnfr-4.5.2.dist-info → tnfr-8.5.0.dist-info}/WHEEL +0 -0
- {tnfr-4.5.2.dist-info → tnfr-8.5.0.dist-info}/top_level.txt +0 -0
tnfr/utils/init.py
ADDED
|
@@ -0,0 +1,821 @@
|
|
|
1
|
+
"""Core logging and import helpers for :mod:`tnfr`.
|
|
2
|
+
|
|
3
|
+
This module merges the functionality that historically lived in
|
|
4
|
+
``tnfr.logging_utils`` and ``tnfr.import_utils``. The behaviour is kept
|
|
5
|
+
identical so downstream consumers can keep relying on the same APIs while
|
|
6
|
+
benefiting from a consolidated entry point under :mod:`tnfr.utils`.
|
|
7
|
+
"""
|
|
8
|
+
|
|
9
|
+
from __future__ import annotations
|
|
10
|
+
|
|
11
|
+
import importlib
|
|
12
|
+
import logging
|
|
13
|
+
import threading
|
|
14
|
+
import warnings
|
|
15
|
+
import weakref
|
|
16
|
+
from collections import OrderedDict
|
|
17
|
+
from dataclasses import field
|
|
18
|
+
from typing import (
|
|
19
|
+
TYPE_CHECKING,
|
|
20
|
+
Any,
|
|
21
|
+
Callable,
|
|
22
|
+
Hashable,
|
|
23
|
+
Iterable,
|
|
24
|
+
Iterator,
|
|
25
|
+
Literal,
|
|
26
|
+
Mapping,
|
|
27
|
+
)
|
|
28
|
+
|
|
29
|
+
from ..compat.dataclass import dataclass
|
|
30
|
+
|
|
31
|
+
if TYPE_CHECKING:
|
|
32
|
+
from .cache import CacheManager
|
|
33
|
+
|
|
34
|
+
__all__ = (
|
|
35
|
+
"_configure_root",
|
|
36
|
+
"cached_import",
|
|
37
|
+
"warm_cached_import",
|
|
38
|
+
"LazyImportProxy",
|
|
39
|
+
"get_logger",
|
|
40
|
+
"get_numpy",
|
|
41
|
+
"get_nodenx",
|
|
42
|
+
"prune_failed_imports",
|
|
43
|
+
"WarnOnce",
|
|
44
|
+
"warn_once",
|
|
45
|
+
"IMPORT_LOG",
|
|
46
|
+
"EMIT_MAP",
|
|
47
|
+
"_warn_failure",
|
|
48
|
+
"_IMPORT_STATE",
|
|
49
|
+
"_reset_logging_state",
|
|
50
|
+
"_reset_import_state",
|
|
51
|
+
"_FAILED_IMPORT_LIMIT",
|
|
52
|
+
"_DEFAULT_CACHE_SIZE",
|
|
53
|
+
)
|
|
54
|
+
|
|
55
|
+
# Module-level flag: set to True once `_configure_root` has run, so the
# root-logger configuration happens at most once per process.
_LOGGING_CONFIGURED = False
|
|
56
|
+
|
|
57
|
+
|
|
58
|
+
def _reset_logging_state() -> None:
    """Forget that root-logger configuration already happened.

    After this call the next :func:`_configure_root` invocation will run
    its configuration logic again instead of short-circuiting.
    """
    global _LOGGING_CONFIGURED
    _LOGGING_CONFIGURED = False
|
|
63
|
+
|
|
64
|
+
|
|
65
|
+
def _configure_root() -> None:
    """Attach a default handler/format to the root logger, exactly once."""
    global _LOGGING_CONFIGURED
    if _LOGGING_CONFIGURED:
        return

    root_logger = logging.getLogger()
    if not root_logger.handlers:
        basic_kwargs: dict[str, Any] = {
            "format": "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
        }
        # Only pick a level when none was configured, so callers that
        # already set a root level keep it.
        if root_logger.level == logging.NOTSET:
            basic_kwargs["level"] = logging.INFO
        logging.basicConfig(**basic_kwargs)

    _LOGGING_CONFIGURED = True
|
|
80
|
+
|
|
81
|
+
|
|
82
|
+
def get_logger(name: str) -> logging.Logger:
    """Return the logger for *name*, ensuring root configuration first."""
    _configure_root()
    return logging.getLogger(name)
|
|
87
|
+
|
|
88
|
+
|
|
89
|
+
class WarnOnce:
    """Callable that emits a warning message at most once per unique key.

    Seen keys are tracked in a bounded :class:`set`; once ``maxsize``
    entries accumulate, an arbitrary one is discarded (``set.pop``) to keep
    memory stable — ordered eviction is intentionally avoided to stay
    lightweight.  A ``maxsize`` of zero or less disables de-duplication, so
    every invocation logs.  Instances accept either a mapping of keys to
    payloads or a single key with an optional payload.
    """

    def __init__(
        self, logger: logging.Logger, msg: str, *, maxsize: int = 1024
    ) -> None:
        self._logger = logger
        self._msg = msg
        self._maxsize = maxsize
        self._seen: set[Hashable] = set()
        self._lock = threading.Lock()

    def _mark_seen(self, key: Hashable) -> bool:
        """Record *key*; return ``True`` when it was not tracked before."""
        if self._maxsize <= 0:
            # De-duplication disabled: always report.
            return True
        if key in self._seen:
            return False
        if len(self._seen) >= self._maxsize:
            # Arbitrary eviction keeps the set bounded.
            self._seen.pop()
        self._seen.add(key)
        return True

    def __call__(
        self,
        data: Mapping[Hashable, Any] | Hashable,
        value: Any | None = None,
    ) -> None:
        """Warn for every key in *data* not reported before.

        With a mapping, all previously unseen key/payload pairs are logged
        in a single call.  With a single key, ``value`` (when given)
        becomes the payload passed to the log message; otherwise the key
        itself is used.
        """
        if isinstance(data, Mapping):
            with self._lock:
                fresh = {
                    key: payload
                    for key, payload in data.items()
                    if self._mark_seen(key)
                }
            if fresh:
                self._logger.warning(self._msg, fresh)
            return

        payload = data if value is None else value
        with self._lock:
            is_new = self._mark_seen(data)
        if is_new:
            self._logger.warning(self._msg, payload)

    def clear(self) -> None:
        """Forget every tracked key."""
        with self._lock:
            self._seen.clear()
|
|
158
|
+
|
|
159
|
+
|
|
160
|
+
def warn_once(
    logger: logging.Logger,
    msg: str,
    *,
    maxsize: int = 1024,
) -> WarnOnce:
    """Convenience factory around :class:`WarnOnce` construction."""
    return WarnOnce(logger, msg, maxsize=maxsize)
|
|
169
|
+
|
|
170
|
+
|
|
171
|
+
# Maximum number of failed-import keys retained by ``ImportRegistry``.
_FAILED_IMPORT_LIMIT = 128
# Default capacity handed to the import ``CacheManager``.
_DEFAULT_CACHE_SIZE = 128

# Names under which the two import caches are registered on the manager.
_SUCCESS_CACHE_NAME = "import.success"
_FAILURE_CACHE_NAME = "import.failure"
|
|
176
|
+
|
|
177
|
+
|
|
178
|
+
def _import_key(module_name: str, attr: str | None) -> str:
    """Build the cache key for ``module_name`` plus an optional ``attr``."""
    if attr is None:
        return module_name
    return f"{module_name}.{attr}"
|
|
180
|
+
|
|
181
|
+
|
|
182
|
+
@dataclass(slots=True)
|
|
183
|
+
class ImportRegistry:
|
|
184
|
+
"""Process-wide registry tracking failed imports and emitted warnings."""
|
|
185
|
+
|
|
186
|
+
limit: int = 128
|
|
187
|
+
failed: OrderedDict[str, None] = field(default_factory=OrderedDict)
|
|
188
|
+
warned: set[str] = field(default_factory=set)
|
|
189
|
+
lock: threading.Lock = field(default_factory=threading.Lock)
|
|
190
|
+
|
|
191
|
+
def _insert(self, key: str) -> None:
|
|
192
|
+
self.failed[key] = None
|
|
193
|
+
self.failed.move_to_end(key)
|
|
194
|
+
while len(self.failed) > self.limit:
|
|
195
|
+
self.failed.popitem(last=False)
|
|
196
|
+
|
|
197
|
+
def record_failure(self, key: str, *, module: str | None = None) -> None:
|
|
198
|
+
"""Record ``key`` and, optionally, ``module`` as failed imports."""
|
|
199
|
+
|
|
200
|
+
with self.lock:
|
|
201
|
+
self._insert(key)
|
|
202
|
+
if module and module != key:
|
|
203
|
+
self._insert(module)
|
|
204
|
+
|
|
205
|
+
def discard(self, key: str) -> None:
|
|
206
|
+
"""Remove ``key`` from the registry and clear its warning state."""
|
|
207
|
+
|
|
208
|
+
with self.lock:
|
|
209
|
+
self.failed.pop(key, None)
|
|
210
|
+
self.warned.discard(key)
|
|
211
|
+
|
|
212
|
+
def mark_warning(self, module: str) -> bool:
|
|
213
|
+
"""Mark ``module`` as warned and return ``True`` if it was new."""
|
|
214
|
+
|
|
215
|
+
with self.lock:
|
|
216
|
+
if module in self.warned:
|
|
217
|
+
return False
|
|
218
|
+
self.warned.add(module)
|
|
219
|
+
return True
|
|
220
|
+
|
|
221
|
+
def clear(self) -> None:
|
|
222
|
+
"""Remove all failure records and warning markers."""
|
|
223
|
+
|
|
224
|
+
with self.lock:
|
|
225
|
+
self.failed.clear()
|
|
226
|
+
self.warned.clear()
|
|
227
|
+
|
|
228
|
+
def __contains__(self, key: str) -> bool: # pragma: no cover - trivial
|
|
229
|
+
with self.lock:
|
|
230
|
+
return key in self.failed
|
|
231
|
+
|
|
232
|
+
|
|
233
|
+
# Successful imports are cached so lazy proxies can resolve once and later
|
|
234
|
+
# requests return the concrete object without recreating the proxy. The cache
|
|
235
|
+
# stores weak references whenever possible so unused imports can be collected
|
|
236
|
+
# after external references disappear.
|
|
237
|
+
|
|
238
|
+
|
|
239
|
+
class _CacheEntry:
    """Hold a cached import behind a weak reference when possible.

    Values that cannot be weakly referenced (``weakref.ref`` raises
    ``TypeError``) are kept alive with a strong reference instead.
    """

    __slots__ = ("_kind", "_value")

    def __init__(
        self,
        value: Any,
        *,
        key: str,
        remover: Callable[[str, weakref.ReferenceType[Any]], None],
    ) -> None:
        try:
            # ``remover`` is invoked when the referent is collected so the
            # owning cache can drop the stale entry.
            ref = weakref.ref(value, lambda r, key=key: remover(key, r))
        except TypeError:
            self._kind = "strong"
            self._value = value
        else:
            self._kind = "weak"
            self._value = ref

    def get(self) -> Any | None:
        """Return the stored value, or ``None`` once it was collected."""
        if self._kind == "weak":
            return self._value()
        return self._value

    def matches(self, ref: weakref.ReferenceType[Any]) -> bool:
        """True when this entry wraps exactly the weak reference *ref*."""
        return self._kind == "weak" and self._value is ref
|
|
267
|
+
|
|
268
|
+
|
|
269
|
+
# Lazily created singleton ``CacheManager`` backing the import caches;
# stays ``None`` until ``_get_import_cache_manager`` first runs.
_IMPORT_CACHE_MANAGER: CacheManager | None = None
# Serialises the one-time initialisation above (double-checked locking).
_IMPORT_CACHE_MANAGER_LOCK = threading.Lock()
|
|
271
|
+
|
|
272
|
+
|
|
273
|
+
def _get_import_cache_manager() -> CacheManager:
    """Lazily initialize and return the import cache manager.

    This function breaks the circular import between init and cache modules
    by deferring the CacheManager import until first use. The initialization
    is thread-safe using a module-level lock to ensure only one CacheManager
    instance is created.

    Returns
    -------
    CacheManager
        Singleton cache manager instance for import caching. The same instance
        is returned on subsequent calls.

    Notes
    -----
    Thread Safety:
        Multiple concurrent calls are synchronized via `_IMPORT_CACHE_MANAGER_LOCK`
        to prevent race conditions during initialization.

    Structural Contract:
        - First call: Creates and configures CacheManager
        - Subsequent calls: Returns existing instance (no-op check)
        - Preserves deterministic caching behavior
    """
    global _IMPORT_CACHE_MANAGER
    # Fast path: already initialised, no lock needed.
    if _IMPORT_CACHE_MANAGER is None:
        with _IMPORT_CACHE_MANAGER_LOCK:
            # Double-check pattern: another thread may have initialized
            if _IMPORT_CACHE_MANAGER is None:
                # Local import avoids a module-level import cycle.
                from .cache import CacheManager

                _IMPORT_CACHE_MANAGER = CacheManager(
                    default_capacity=_DEFAULT_CACHE_SIZE
                )
                # Register both stores; the factories build empty
                # OrderedDicts on first access.
                _IMPORT_CACHE_MANAGER.register(
                    _SUCCESS_CACHE_NAME, _success_cache_factory
                )
                _IMPORT_CACHE_MANAGER.register(
                    _FAILURE_CACHE_NAME, _failure_cache_factory
                )
    return _IMPORT_CACHE_MANAGER
|
|
315
|
+
|
|
316
|
+
|
|
317
|
+
def _success_cache_factory() -> OrderedDict[str, _CacheEntry]:
    """Create the (initially empty) LRU store for successful imports."""
    return OrderedDict()
|
|
319
|
+
|
|
320
|
+
|
|
321
|
+
def _failure_cache_factory() -> OrderedDict[str, Exception]:
    """Create the (initially empty) LRU store for failed imports."""
    return OrderedDict()
|
|
323
|
+
|
|
324
|
+
|
|
325
|
+
def _remove_success_entry(key: str, ref: weakref.ReferenceType[Any]) -> None:
    """Weakref finalizer: drop ``key`` from the success cache.

    The entry is only removed when it still wraps exactly ``ref``; a newer
    entry stored under the same key is left untouched.
    """
    manager = _get_import_cache_manager()

    def _cleanup(
        cache: OrderedDict[str, _CacheEntry],
    ) -> OrderedDict[str, _CacheEntry]:
        entry = cache.get(key)
        if entry is not None and entry.matches(ref):
            cache.pop(key, None)
            manager.increment_eviction(_SUCCESS_CACHE_NAME)
        return cache

    manager.update(_SUCCESS_CACHE_NAME, _cleanup)
|
|
335
|
+
|
|
336
|
+
|
|
337
|
+
def _trim_cache(name: str, cache: OrderedDict[str, Any]) -> None:
    """Evict oldest entries until ``cache`` fits the registered capacity."""
    manager = _get_import_cache_manager()
    capacity = manager.get_capacity(name)
    if capacity is None:
        # Unbounded cache: nothing to trim.
        return
    while len(cache) > capacity:
        # Oldest-first eviction keeps the most recently used entries.
        cache.popitem(last=False)
        manager.increment_eviction(name)
|
|
344
|
+
|
|
345
|
+
|
|
346
|
+
def _get_success(key: str) -> Any | None:
    """Return the cached successful import for ``key``, or ``None`` on miss.

    A hit refreshes the entry's LRU position; an entry whose weak referent
    was collected is evicted on the spot and counted as a miss.
    """
    result: Any | None = None
    hit = False

    with _get_import_cache_manager().timer(_SUCCESS_CACHE_NAME):

        def _lookup(
            cache: OrderedDict[str, _CacheEntry],
        ) -> OrderedDict[str, _CacheEntry]:
            # Runs under the manager's control; communicates the outcome
            # back through the enclosing ``result``/``hit`` cells.
            nonlocal result, hit
            entry = cache.get(key)
            if entry is None:
                return cache
            value = entry.get()
            if value is None:
                # Referent was garbage-collected: drop the stale entry.
                cache.pop(key, None)
                _get_import_cache_manager().increment_eviction(_SUCCESS_CACHE_NAME)
                return cache
            # Refresh LRU order on a live hit.
            cache.move_to_end(key)
            result = value
            hit = True
            return cache

        _get_import_cache_manager().update(_SUCCESS_CACHE_NAME, _lookup)
        if hit:
            _get_import_cache_manager().increment_hit(_SUCCESS_CACHE_NAME)
            return result
        _get_import_cache_manager().increment_miss(_SUCCESS_CACHE_NAME)
        return None
|
|
375
|
+
|
|
376
|
+
|
|
377
|
+
def _store_success(key: str, value: Any) -> None:
    """Cache ``value`` under ``key`` and purge any stale failure record."""
    # Wrap the value so it can be weakly referenced; the remover callback
    # evicts the entry when the referent is collected.
    entry = _CacheEntry(value, key=key, remover=_remove_success_entry)

    def _store(cache: OrderedDict[str, _CacheEntry]) -> OrderedDict[str, _CacheEntry]:
        # Insert/refresh as most-recently-used, then enforce capacity.
        cache[key] = entry
        cache.move_to_end(key)
        _trim_cache(_SUCCESS_CACHE_NAME, cache)
        return cache

    def _purge_failure(
        cache: OrderedDict[str, Exception],
    ) -> OrderedDict[str, Exception]:
        # A fresh success invalidates a previously recorded failure.
        if cache.pop(key, None) is not None:
            _get_import_cache_manager().increment_eviction(_FAILURE_CACHE_NAME)
        return cache

    _get_import_cache_manager().update(_SUCCESS_CACHE_NAME, _store)
    _get_import_cache_manager().update(_FAILURE_CACHE_NAME, _purge_failure)
|
|
395
|
+
|
|
396
|
+
|
|
397
|
+
def _get_failure(key: str) -> Exception | None:
    """Return the cached import failure for ``key``, or ``None`` on miss.

    A hit refreshes the entry's LRU position so frequently re-queried
    failures stay resident.
    """
    result: Exception | None = None
    hit = False

    with _get_import_cache_manager().timer(_FAILURE_CACHE_NAME):

        def _lookup(cache: OrderedDict[str, Exception]) -> OrderedDict[str, Exception]:
            # Communicates the outcome back through ``result``/``hit``.
            nonlocal result, hit
            exc = cache.get(key)
            if exc is None:
                return cache
            cache.move_to_end(key)
            result = exc
            hit = True
            return cache

        _get_import_cache_manager().update(_FAILURE_CACHE_NAME, _lookup)
        if hit:
            _get_import_cache_manager().increment_hit(_FAILURE_CACHE_NAME)
            return result
        _get_import_cache_manager().increment_miss(_FAILURE_CACHE_NAME)
        return None
|
|
419
|
+
|
|
420
|
+
|
|
421
|
+
def _store_failure(key: str, exc: Exception) -> None:
    """Record ``exc`` as the failure for ``key`` and drop any stale success."""

    def _store(cache: OrderedDict[str, Exception]) -> OrderedDict[str, Exception]:
        # Insert/refresh as most-recently-used, then enforce capacity.
        cache[key] = exc
        cache.move_to_end(key)
        _trim_cache(_FAILURE_CACHE_NAME, cache)
        return cache

    def _purge_success(
        cache: OrderedDict[str, _CacheEntry],
    ) -> OrderedDict[str, _CacheEntry]:
        # A fresh failure invalidates a previously cached success.
        if cache.pop(key, None) is not None:
            _get_import_cache_manager().increment_eviction(_SUCCESS_CACHE_NAME)
        return cache

    _get_import_cache_manager().update(_FAILURE_CACHE_NAME, _store)
    _get_import_cache_manager().update(_SUCCESS_CACHE_NAME, _purge_success)
|
|
438
|
+
|
|
439
|
+
|
|
440
|
+
def _clear_import_cache() -> None:
    """Drop every entry from both import caches via the shared manager."""
    _get_import_cache_manager().clear()
|
|
442
|
+
|
|
443
|
+
|
|
444
|
+
# Singleton registry of failed imports / emitted warnings for this process.
_IMPORT_STATE = ImportRegistry()
# Public alias to ease direct introspection in tests and diagnostics.
IMPORT_LOG = _IMPORT_STATE
|
|
447
|
+
|
|
448
|
+
|
|
449
|
+
def _reset_import_state() -> None:
    """Replace the import registry and flush both import caches."""
    global _IMPORT_STATE, IMPORT_LOG
    # Rebind both names to one fresh registry so the alias stays in sync.
    IMPORT_LOG = _IMPORT_STATE = ImportRegistry()
    _clear_import_cache()
|
|
456
|
+
|
|
457
|
+
|
|
458
|
+
def _import_cached(module_name: str, attr: str | None) -> tuple[bool, Any]:
    """Import ``module_name`` (and optional ``attr``) capturing failures.

    Returns ``(True, object)`` on success or ``(False, exception)`` when
    the import or attribute lookup raised.  Both outcomes are memoised in
    the success/failure caches under the ``module.attr`` key.
    """
    key = _import_key(module_name, attr)
    cached_value = _get_success(key)
    if cached_value is not None:
        return True, cached_value

    cached_failure = _get_failure(key)
    if cached_failure is not None:
        return False, cached_failure

    try:
        module = importlib.import_module(module_name)
        obj = getattr(module, attr) if attr else module
    except (ImportError, AttributeError) as exc:
        # Remember the failure so repeated lookups stay cheap.
        _store_failure(key, exc)
        return False, exc

    # NOTE(review): a successfully imported value equal to ``None`` can
    # never be served from the cache (``_get_success`` reports ``None`` as
    # a miss), so it is re-imported on every call — harmless but worth
    # confirming.
    _store_success(key, obj)
    return True, obj
|
|
479
|
+
|
|
480
|
+
|
|
481
|
+
# Module logger; the call also triggers one-time root-logger configuration.
logger = get_logger(__name__)
|
|
482
|
+
|
|
483
|
+
|
|
484
|
+
def _format_failure_message(module: str, attr: str | None, err: Exception) -> str:
|
|
485
|
+
"""Return a standardised failure message."""
|
|
486
|
+
|
|
487
|
+
return (
|
|
488
|
+
f"Failed to import module '{module}': {err}"
|
|
489
|
+
if isinstance(err, ImportError)
|
|
490
|
+
else f"Module '{module}' has no attribute '{attr}': {err}"
|
|
491
|
+
)
|
|
492
|
+
|
|
493
|
+
|
|
494
|
+
# Dispatch table from an emission mode name to the matching ``_emit`` call.
# The lambdas defer the lookup of ``_emit`` until call time, so this table
# may be defined before ``_emit`` itself.
EMIT_MAP: dict[str, Callable[[str], None]] = {
    "warn": lambda msg: _emit(msg, "warn"),
    "log": lambda msg: _emit(msg, "log"),
    "both": lambda msg: _emit(msg, "both"),
}
|
|
499
|
+
|
|
500
|
+
|
|
501
|
+
def _emit(message: str, mode: Literal["warn", "log", "both"]) -> None:
|
|
502
|
+
"""Emit ``message`` via :mod:`warnings`, logger or both."""
|
|
503
|
+
|
|
504
|
+
if mode in ("warn", "both"):
|
|
505
|
+
warnings.warn(message, RuntimeWarning, stacklevel=2)
|
|
506
|
+
if mode in ("log", "both"):
|
|
507
|
+
logger.warning(message)
|
|
508
|
+
|
|
509
|
+
|
|
510
|
+
def _warn_failure(
    module: str,
    attr: str | None,
    err: Exception,
    *,
    emit: Literal["warn", "log", "both"] = "warn",
) -> None:
    """Report a failed import, downgrading repeat reports to debug logs.

    Only the first failure per ``module`` is surfaced through the configured
    ``emit`` channel; later ones are logged at debug level to avoid spam.
    """

    text = _format_failure_message(module, attr, err)
    first_report = _IMPORT_STATE.mark_warning(module)
    if not first_report:
        logger.debug(text)
        return
    EMIT_MAP[emit](text)
|
|
524
|
+
|
|
525
|
+
|
|
526
|
+
class LazyImportProxy:
    """Descriptor that defers imports until first use."""

    # Slots keep per-proxy overhead minimal; ``__weakref__`` is required so
    # the proxy itself can be weakly referenced by its cleanup callback.
    __slots__ = (
        "_module",
        "_attr",
        "_emit",
        "_fallback",
        "_target_ref",
        "_strong_target",
        "_lock",
        "_key",
        "__weakref__",
    )

    # Sentinel distinguishing "not resolved yet" from a resolved ``None``.
    _UNRESOLVED = object()

    def __init__(
        self,
        module_name: str,
        attr: str | None,
        emit: Literal["warn", "log", "both"],
        fallback: Any | None,
    ) -> None:
        self._module = module_name
        self._attr = attr
        self._emit = emit
        self._fallback = fallback
        # The resolved target is held weakly when possible so the proxy does
        # not keep it alive; ``_strong_target`` is the fallback for objects
        # that cannot be weakly referenced.
        self._target_ref: weakref.ReferenceType[Any] | None = None
        self._strong_target: Any = self._UNRESOLVED
        self._lock = threading.Lock()
        self._key = _import_key(module_name, attr)

    def _store_target(self, target: Any) -> None:
        # Prefer a weak reference; fall back to a strong one when the target
        # type does not support weak references (raises TypeError).
        try:
            self_ref = weakref.ref(self)

            def _cleanup(ref: weakref.ReferenceType[Any]) -> None:
                # Drop the stale reference once the target is collected, but
                # only if it is still the reference this call installed.
                proxy = self_ref()
                if proxy is None:
                    return
                with proxy._lock:
                    if proxy._target_ref is ref:
                        proxy._target_ref = None

            self._target_ref = weakref.ref(target, _cleanup)
        except TypeError:
            self._strong_target = target
            self._target_ref = None
        else:
            # Weak path succeeded: make sure no stale strong ref lingers.
            self._strong_target = self._UNRESOLVED

    def _resolved_target(self) -> Any:
        # Return the cached target, or ``_UNRESOLVED`` when it was never
        # stored or has since been garbage collected.
        if self._strong_target is not self._UNRESOLVED:
            return self._strong_target
        if self._target_ref is None:
            return self._UNRESOLVED
        target = self._target_ref()
        if target is None:
            self._target_ref = None
            return self._UNRESOLVED
        return target

    def _resolve(self) -> Any:
        # Double-checked locking: cheap unlocked probe first, then re-check
        # under the lock before performing the actual import.
        target = self._resolved_target()
        if target is not self._UNRESOLVED:
            return target

        with self._lock:
            target = self._resolved_target()
            if target is self._UNRESOLVED:
                target = _resolve_import(
                    self._module,
                    self._attr,
                    self._emit,
                    self._fallback,
                )
                self._store_target(target)
        return target

    def resolve(self) -> Any:
        """Eagerly resolve and return the proxied object."""

        return self._resolve()

    def __getattr__(self, item: str) -> Any:
        """Proxy attribute access to the resolved target."""

        return getattr(self._resolve(), item)

    def __call__(self, *args: Any, **kwargs: Any) -> Any:
        """Invoke the resolved target with ``args``/``kwargs``."""

        return self._resolve()(*args, **kwargs)

    def __bool__(self) -> bool:
        """Return truthiness of the resolved target."""

        return bool(self._resolve())

    def __repr__(self) -> str:  # pragma: no cover - representation helper
        """Return representation showing resolution status."""

        # Unlike __str__, repr never triggers the import.
        target = self._resolved_target()
        if target is self._UNRESOLVED:
            return f"<LazyImportProxy pending={self._key!r}>"
        return repr(target)

    def __str__(self) -> str:  # pragma: no cover - representation helper
        """Return string representation of the resolved target."""

        return str(self._resolve())

    def __iter__(self) -> Iterator[Any]:  # pragma: no cover - passthrough helper
        """Yield iteration from the resolved target."""

        return iter(self._resolve())
|
|
643
|
+
|
|
644
|
+
|
|
645
|
+
def _resolve_import(
    module_name: str,
    attr: str | None,
    emit: Literal["warn", "log", "both"],
    fallback: Any | None,
) -> Any | None:
    """Resolve an import eagerly, returning ``fallback`` on failure.

    Success clears any previously recorded failure for the key (and for the
    bare module when an attribute lookup was requested); failure is reported
    via ``emit`` and recorded in the import registry.
    """

    registry_key = _import_key(module_name, attr)
    ok, payload = _import_cached(module_name, attr)

    if not ok:
        failure: Exception = payload
        _warn_failure(module_name, attr, failure, emit=emit)
        # Only an ImportError implicates the module itself; an attribute
        # failure should not mark the whole module as broken.
        _IMPORT_STATE.record_failure(
            registry_key,
            module=module_name if isinstance(failure, ImportError) else None,
        )
        return fallback

    _IMPORT_STATE.discard(registry_key)
    if attr is not None:
        _IMPORT_STATE.discard(module_name)
    return payload
|
|
664
|
+
|
|
665
|
+
|
|
666
|
+
def cached_import(
    module_name: str,
    attr: str | None = None,
    *,
    fallback: Any | None = None,
    emit: Literal["warn", "log", "both"] = "warn",
    lazy: bool = False,
) -> Any | None:
    """Import ``module_name`` (and optional ``attr``) with caching and fallback.

    When ``lazy`` is ``True`` the import is deferred until the returned proxy is
    first used. The proxy integrates with the shared cache so subsequent calls
    return the resolved object directly.

    Parameters
    ----------
    module_name:
        Dotted path of the module to import.
    attr:
        Optional attribute to fetch from the imported module.
    fallback:
        Value returned when the import (or attribute lookup) fails.
    emit:
        Channel used to report failures: ``"warn"``, ``"log"`` or ``"both"``.
    lazy:
        Defer the import until the returned proxy is first used.
    """

    if lazy:
        # Compute the cache key only on the lazy path: the eager path below
        # derives its own key inside _resolve_import, so computing it here
        # unconditionally (as before) was wasted work on every eager call.
        key = _import_key(module_name, attr)
        cached_obj = _get_success(key)
        if cached_obj is not None:
            # A prior successful import short-circuits proxy creation.
            return cached_obj
        return LazyImportProxy(module_name, attr, emit, fallback)

    return _resolve_import(module_name, attr, emit, fallback)
|
|
690
|
+
|
|
691
|
+
|
|
692
|
+
# A warm-up specification is either a bare module name or a (module, attr) pair.
_ModuleSpec = str | tuple[str, str | None]
|
|
693
|
+
|
|
694
|
+
|
|
695
|
+
def _normalise_warm_specs(
|
|
696
|
+
module: _ModuleSpec | Iterable[_ModuleSpec],
|
|
697
|
+
extra: tuple[_ModuleSpec, ...],
|
|
698
|
+
attr: str | None,
|
|
699
|
+
) -> list[tuple[str, str | None]]:
|
|
700
|
+
if attr is not None:
|
|
701
|
+
if extra:
|
|
702
|
+
raise ValueError("'attr' can only be combined with a single module name")
|
|
703
|
+
if not isinstance(module, str):
|
|
704
|
+
raise TypeError(
|
|
705
|
+
"'attr' requires the first argument to be a module name string"
|
|
706
|
+
)
|
|
707
|
+
return [(module, attr)]
|
|
708
|
+
|
|
709
|
+
specs: list[_ModuleSpec]
|
|
710
|
+
if extra:
|
|
711
|
+
specs = [module, *extra]
|
|
712
|
+
elif isinstance(module, tuple) and len(module) == 2:
|
|
713
|
+
specs = [module]
|
|
714
|
+
elif isinstance(module, str):
|
|
715
|
+
specs = [module]
|
|
716
|
+
else:
|
|
717
|
+
if isinstance(module, Iterable):
|
|
718
|
+
specs = list(module)
|
|
719
|
+
if not specs:
|
|
720
|
+
raise ValueError("At least one module specification is required")
|
|
721
|
+
else:
|
|
722
|
+
raise TypeError("Unsupported module specification for warm_cached_import")
|
|
723
|
+
|
|
724
|
+
normalised: list[tuple[str, str | None]] = []
|
|
725
|
+
for spec in specs:
|
|
726
|
+
if isinstance(spec, str):
|
|
727
|
+
normalised.append((spec, None))
|
|
728
|
+
continue
|
|
729
|
+
if isinstance(spec, tuple) and len(spec) == 2:
|
|
730
|
+
module_name, module_attr = spec
|
|
731
|
+
if not isinstance(module_name, str) or (
|
|
732
|
+
module_attr is not None and not isinstance(module_attr, str)
|
|
733
|
+
):
|
|
734
|
+
raise TypeError("Invalid module specification for warm_cached_import")
|
|
735
|
+
normalised.append((module_name, module_attr))
|
|
736
|
+
continue
|
|
737
|
+
raise TypeError(
|
|
738
|
+
"Module specifications must be strings or (module, attr) tuples"
|
|
739
|
+
)
|
|
740
|
+
|
|
741
|
+
return normalised
|
|
742
|
+
|
|
743
|
+
|
|
744
|
+
def warm_cached_import(
    module: _ModuleSpec | Iterable[_ModuleSpec],
    *extra: _ModuleSpec,
    attr: str | None = None,
    fallback: Any | None = None,
    emit: Literal["warn", "log", "both"] = "warn",
    lazy: bool = False,
    resolve: bool = False,
) -> Any | dict[str, Any | None]:
    """Pre-populate the import cache for the provided module specifications.

    When ``lazy`` is ``True`` the cached objects are returned as proxies by
    default. Setting ``resolve`` forces those proxies to resolve immediately
    during the warm-up phase while still sharing the same cache entries.

    Returns the single warmed object when exactly one spec was given,
    otherwise a mapping of import key to warmed object.
    """

    if resolve and not lazy:
        raise ValueError("'resolve' can only be used when 'lazy' is True")

    warmed: dict[str, Any | None] = {}
    for name, attribute in _normalise_warm_specs(module, extra, attr):
        cache_key = _import_key(name, attribute)
        entry = cached_import(
            name,
            attribute,
            fallback=fallback,
            emit=emit,
            lazy=lazy,
        )
        if resolve and isinstance(entry, LazyImportProxy):
            entry = entry.resolve()
        warmed[cache_key] = entry

    if len(warmed) == 1:
        return next(iter(warmed.values()))
    return warmed
|
|
780
|
+
|
|
781
|
+
|
|
782
|
+
def _clear_default_cache() -> None:
    """Clear the import caches and re-arm the one-shot numpy log latch."""
    global _NP_MISSING_LOGGED

    _clear_import_cache()
    _NP_MISSING_LOGGED = False
|
|
787
|
+
|
|
788
|
+
|
|
789
|
+
# Expose cache clearing through the conventional ``cache_clear`` attribute so
# ``cached_import`` mimics ``functools.lru_cache``-style APIs.
cached_import.cache_clear = _clear_default_cache  # type: ignore[attr-defined]

# One-shot latch ensuring the numpy-missing message is logged only once.
_NP_MISSING_LOGGED = False
|
|
792
|
+
|
|
793
|
+
|
|
794
|
+
def get_numpy() -> Any | None:
    """Return the cached :mod:`numpy` module when available.

    Logs the absence of numpy at debug level only once; the latch is reset
    after a subsequent successful import so a later failure is logged again.
    """

    global _NP_MISSING_LOGGED

    numpy_module = cached_import("numpy")
    if numpy_module is None:
        if not _NP_MISSING_LOGGED:
            logger.debug("Failed to import numpy; continuing in non-vectorised mode")
            _NP_MISSING_LOGGED = True
        return None

    # Success: clear the latch so future failures are reported again.
    _NP_MISSING_LOGGED = False
    return numpy_module
|
|
809
|
+
|
|
810
|
+
|
|
811
|
+
def get_nodenx() -> type | None:
    """Return :class:`tnfr.node.NodeNX` using import caching."""

    # Uses cached_import's default fallback, so None is returned when the
    # import or attribute lookup fails.
    return cached_import("tnfr.node", "NodeNX")
|
|
815
|
+
|
|
816
|
+
|
|
817
|
+
def prune_failed_imports() -> None:
    """Clear the registry of recorded import failures and warnings."""

    _IMPORT_STATE.clear()
    # Also drop the memoised failure entries so subsequent imports retry.
    _get_import_cache_manager().clear(_FAILURE_CACHE_NAME)
|