pico-ioc 1.2.0__py3-none-any.whl → 1.4.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pico_ioc/__init__.py +30 -4
- pico_ioc/_state.py +69 -4
- pico_ioc/_version.py +1 -1
- pico_ioc/api.py +183 -251
- pico_ioc/builder.py +294 -0
- pico_ioc/config.py +332 -0
- pico_ioc/container.py +73 -26
- pico_ioc/decorators.py +88 -9
- pico_ioc/interceptors.py +56 -0
- pico_ioc/plugins.py +17 -1
- pico_ioc/policy.py +245 -0
- pico_ioc/proxy.py +59 -7
- pico_ioc/resolver.py +54 -46
- pico_ioc/scanner.py +75 -102
- pico_ioc/scope.py +46 -0
- pico_ioc/utils.py +25 -0
- {pico_ioc-1.2.0.dist-info → pico_ioc-1.4.0.dist-info}/METADATA +65 -16
- pico_ioc-1.4.0.dist-info/RECORD +22 -0
- pico_ioc/typing_utils.py +0 -29
- pico_ioc-1.2.0.dist-info/RECORD +0 -17
- {pico_ioc-1.2.0.dist-info → pico_ioc-1.4.0.dist-info}/WHEEL +0 -0
- {pico_ioc-1.2.0.dist-info → pico_ioc-1.4.0.dist-info}/licenses/LICENSE +0 -0
- {pico_ioc-1.2.0.dist-info → pico_ioc-1.4.0.dist-info}/top_level.txt +0 -0
pico_ioc/builder.py
ADDED
@@ -0,0 +1,294 @@
+# src/pico_ioc/builder.py
+from __future__ import annotations
+
+import inspect as _inspect
+import logging
+import os
+from typing import Any, Callable, Dict, Iterable, List, Optional, Sequence, Tuple
+from typing import get_origin, get_args, Annotated
+
+from .interceptors import MethodInterceptor, ContainerInterceptor
+from .container import PicoContainer, _is_compatible
+from .policy import apply_policy, _conditional_active
+from .plugins import PicoPlugin, run_plugin_hook
+from .scanner import scan_and_configure
+from .resolver import Resolver, _get_hints
+from . import _state
+from .config import ConfigRegistry
+
+class PicoContainerBuilder:
+    """Configures and builds a PicoContainer. Does not touch global context."""
+
+    def __init__(self):
+        self._scan_plan: List[Tuple[Any, Optional[Callable[[str], bool]], Tuple[PicoPlugin, ...]]] = []
+        self._overrides: Dict[Any, Any] = {}
+        self._profiles: Optional[List[str]] = None
+        self._plugins: Tuple[PicoPlugin, ...] = ()
+        self._include_tags: Optional[set[str]] = None
+        self._exclude_tags: Optional[set[str]] = None
+        self._roots: Iterable[type] = ()
+        self._providers: Dict[Any, Dict] = {}
+        self._interceptor_decls: List[Tuple[Any, dict]] = []
+        self._eager: bool = True
+        self._config_registry: ConfigRegistry | None = None
+
+    # -------- fluent config --------
+
+    def with_config(self, registry: ConfigRegistry) -> "PicoContainerBuilder":
+        self._config_registry = registry
+        return self
+
+    def with_plugins(self, plugins: Tuple[PicoPlugin, ...]) -> "PicoContainerBuilder":
+        self._plugins = plugins or ()
+        return self
+
+    def with_profiles(self, profiles: Optional[List[str]]) -> "PicoContainerBuilder":
+        self._profiles = profiles
+        return self
+
+    def add_scan_package(self, package: Any, exclude: Optional[Callable[[str], bool]] = None) -> "PicoContainerBuilder":
+        self._scan_plan.append((package, exclude, self._plugins))
+        return self
+
+    def with_overrides(self, overrides: Optional[Dict[Any, Any]]) -> "PicoContainerBuilder":
+        self._overrides = overrides or {}
+        return self
+
+    def with_tag_filters(self, include: Optional[set[str]], exclude: Optional[set[str]]) -> "PicoContainerBuilder":
+        self._include_tags = include
+        self._exclude_tags = exclude
+        return self
+
+    def with_roots(self, roots: Iterable[type]) -> "PicoContainerBuilder":
+        self._roots = roots or ()
+        return self
+
+    def with_eager(self, eager: bool) -> "PicoContainerBuilder":
+        self._eager = bool(eager)
+        return self
+
+    # -------- build --------
+
+    def build(self) -> PicoContainer:
+        """Build and return a fully configured container."""
+        requested_profiles = _resolve_profiles(self._profiles)
+
+        container = PicoContainer(providers=self._providers)
+        container._active_profiles = tuple(requested_profiles)
+        setattr(container, "_config_registry", self._config_registry)
+
+        for pkg, exclude, scan_plugins in self._scan_plan:
+            with _state.scanning_flag():
+                c, f, decls = scan_and_configure(pkg, container, exclude=exclude, plugins=scan_plugins)
+            logging.info("Scanned '%s' (components: %d, factories: %d)", getattr(pkg, "__name__", pkg), c, f)
+            self._interceptor_decls.extend(decls)
+
+        _activate_and_build_interceptors(
+            container=container,
+            interceptor_decls=self._interceptor_decls,
+            profiles=requested_profiles,
+        )
+
+        binder = container.binder()
+
+        if self._overrides:
+            _apply_overrides(container, self._overrides)
+
+        run_plugin_hook(self._plugins, "after_bind", container, binder)
+        run_plugin_hook(self._plugins, "before_eager", container, binder)
+
+        apply_policy(container, profiles=requested_profiles)
+        _filter_by_tags(container, self._include_tags, self._exclude_tags)
+
+        if self._roots:
+            _restrict_to_subgraph(container, self._roots, self._overrides)
+
+        run_plugin_hook(self._plugins, "after_ready", container, binder)
+
+        if self._eager:
+            container.eager_instantiate_all()
+        logging.info("Container configured and ready.")
+        return container
+
+
+# ---------------- helpers ----------------
+
+def _resolve_profiles(profiles: Optional[List[str]]) -> List[str]:
+    if profiles is not None:
+        return list(profiles)
+    env_val = os.getenv("PICO_PROFILE", "")
+    return [p.strip() for p in env_val.split(",") if p.strip()]
+
+
+def _as_provider(val):
+    if isinstance(val, tuple) and len(val) == 2 and callable(val[0]) and isinstance(val[1], bool):
+        return val[0], val[1]
+    if callable(val):
+        return val, False
+    return (lambda v=val: v), False
+
+
+def _apply_overrides(container: PicoContainer, overrides: Dict[Any, Any]) -> None:
+    for key, val in overrides.items():
+        provider, lazy = _as_provider(val)
+        container.bind(key, provider, lazy=lazy)
+
+
+def _filter_by_tags(container: PicoContainer, include_tags: Optional[set[str]], exclude_tags: Optional[set[str]]) -> None:
+    if not include_tags and not exclude_tags:
+        return
+
+    def _tag_ok(meta: dict) -> bool:
+        tags = set(meta.get("tags", ()))
+        if include_tags and not tags.intersection(include_tags):
+            return False
+        if exclude_tags and tags.intersection(exclude_tags):
+            return False
+        return True
+
+    container._providers = {k: v for k, v in container._providers.items() if _tag_ok(v)}
+
+
+def _compute_allowed_subgraph(container: PicoContainer, roots: Iterable[type]) -> set:
+    allowed: set[Any] = set(roots)
+    stack = list(roots or ())
+
+    def _add_impls_for_base(base_t):
+        for prov_key, meta in container._providers.items():
+            cls = prov_key if isinstance(prov_key, type) else None
+            if cls is not None and _is_compatible(cls, base_t):
+                if prov_key not in allowed:
+                    allowed.add(prov_key)
+                    stack.append(prov_key)
+
+    while stack:
+        k = stack.pop()
+        allowed.add(k)
+        if isinstance(k, type):
+            _add_impls_for_base(k)
+
+        cls = k if isinstance(k, type) else None
+        if cls is None or not container.has(k):
+            continue
+
+        try:
+            sig = _inspect.signature(cls.__init__)
+            hints = _get_hints(cls.__init__, owner_cls=cls)
+        except Exception:
+            continue
+
+        for pname, param in sig.parameters.items():
+            if pname == "self":
+                continue
+            ann = hints.get(pname, param.annotation)
+            origin = get_origin(ann) or ann
+            if origin in (list, tuple):
+                inner = (get_args(ann) or (object,))[0]
+                if get_origin(inner) is Annotated:
+                    inner = (get_args(inner) or (object,))[0]
+                if isinstance(inner, type):
+                    if inner not in allowed:
+                        stack.append(inner)
+                continue
+            if isinstance(ann, type) and ann not in allowed:
+                stack.append(ann)
+            elif container.has(pname) and pname not in allowed:
+                stack.append(pname)
+    return allowed
+
+
+def _restrict_to_subgraph(container: PicoContainer, roots: Iterable[type], overrides: Optional[Dict[Any, Any]]) -> None:
+    allowed = _compute_allowed_subgraph(container, roots)
+    keep_keys: set[Any] = allowed | (set(overrides.keys()) if overrides else set())
+    container._providers = {k: v for k, v in container._providers.items() if k in keep_keys}
+
+
+def _activate_and_build_interceptors(
+    *, container: PicoContainer, interceptor_decls: List[Tuple[Any, dict]], profiles: List[str],
+) -> None:
+    resolver = Resolver(container)
+    active: List[Tuple[int, str, str, Any]] = []
+    activated_method_names: List[str] = []
+    activated_container_names: List[str] = []
+    skipped_debug: List[str] = []
+
+    def _interceptor_meta_active(meta: dict) -> bool:
+        profs = tuple(meta.get("profiles", ())) or ()
+        if profs and (not profiles or not any(p in profs for p in profiles)):
+            return False
+        req_env = tuple(meta.get("require_env", ())) or ()
+        if req_env and not all(os.getenv(k) not in (None, "") for k in req_env):
+            return False
+        pred = meta.get("predicate", None)
+        if callable(pred):
+            try:
+                if not bool(pred()):
+                    return False
+            except Exception:
+                logging.exception("Interceptor predicate failed; skipping")
+                return False
+        return True
+
+    def _looks_like_container_interceptor(inst: Any) -> bool:
+        return all(
+            hasattr(inst, m) for m in ("on_resolve", "on_before_create", "on_after_create", "on_exception")
+        )
+
+    for raw_obj, meta in interceptor_decls:
+        owner_cls, obj = (raw_obj[0], raw_obj[1]) if isinstance(raw_obj, tuple) and len(raw_obj) == 2 else (None, raw_obj)
+        qn = getattr(obj, "__qualname__", repr(obj))
+
+        if not _conditional_active(obj, profiles=profiles) or not _interceptor_meta_active(meta):
+            skipped_debug.append(f"skip:{qn}")
+            continue
+
+        try:
+            if isinstance(obj, type):
+                inst = resolver.create_instance(obj)
+            elif owner_cls is not None:
+                owner_inst = resolver.create_instance(owner_cls)
+                bound = obj.__get__(owner_inst, owner_cls)
+                kwargs = resolver.kwargs_for_callable(bound, owner_cls=owner_cls)
+                inst = bound(**kwargs)
+            else:
+                kwargs = resolver.kwargs_for_callable(obj, owner_cls=None)
+                inst = obj(**kwargs)
+        except Exception:
+            logging.exception("Failed to construct interceptor %r", obj)
+            continue
+
+        kind = meta.get("kind", "method")
+        if kind == "method" and not callable(inst):
+            logging.error("Interceptor %s is not valid for kind %s; skipping", qn, kind)
+            continue
+        if kind == "container" and not _looks_like_container_interceptor(inst):
+            logging.error("Container interceptor %s lacks required methods; skipping", qn)
+            continue
+
+        order = int(meta.get("order", 0))
+        active.append((order, qn, kind, inst))
+
+    active.sort(key=lambda t: (t[0], t[1]))
+
+    for _order, _qn, kind, inst in active:
+        if kind == "container":
+            container.add_container_interceptor(inst)  # type: ignore[arg-type]
+            activated_container_names.append(_qn)
+        else:
+            container.add_method_interceptor(inst)  # type: ignore[arg-type]
+            activated_method_names.append(_qn)
+
+    if activated_method_names or activated_container_names:
+        logging.info(
+            "Interceptors activated: method=%d, container=%d",
+            len(activated_method_names),
+            len(activated_container_names),
+        )
+        logging.debug(
+            "Activated method=%s; Activated container=%s",
+            ", ".join(activated_method_names) or "-",
+            ", ".join(activated_container_names) or "-",
+        )
+    if skipped_debug:
+        logging.debug("Skipped interceptors: %s", ", ".join(skipped_debug))
+
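For orientation, here is a minimal usage sketch of the builder API added in 1.4.0. It is an illustration only: `myapp` is a placeholder package, and the call chain simply strings together the fluent methods shown in the diff above.

from pico_ioc.builder import PicoContainerBuilder
import myapp  # placeholder: any package containing pico-ioc components

container = (
    PicoContainerBuilder()
    .with_profiles(["dev"])     # explicit profiles; otherwise PICO_PROFILE is read from the environment
    .add_scan_package(myapp)    # queue the package for component/factory scanning
    .with_eager(True)           # instantiate all non-lazy providers at build time
    .build()
)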
pico_ioc/config.py
ADDED
@@ -0,0 +1,332 @@
+# src/pico_ioc/config.py
+from __future__ import annotations
+
+import os, json, configparser, pathlib
+from dataclasses import is_dataclass, fields, MISSING
+from typing import Any, Callable, Dict, Iterable, Optional, Sequence, Tuple, Protocol
+
+# ---- Flags & metadata on classes / fields ----
+_CONFIG_FLAG = "_pico_is_config_component"
+_CONFIG_PREFIX = "_pico_config_prefix"
+_FIELD_META = "_pico_config_field_meta"  # dict: name -> FieldSpec
+
+# ---- Source protocol & implementations ----
+
+class ConfigSource(Protocol):
+    def get(self, key: str) -> Optional[str]: ...
+    def keys(self) -> Iterable[str]: ...
+
+class EnvSource:
+    def __init__(self, prefix: str = ""):
+        self.prefix = prefix or ""
+    def get(self, key: str) -> Optional[str]:
+        # try PREFIX+KEY first, then KEY
+        v = os.getenv(self.prefix + key)
+        if v is not None:
+            return v
+        return os.getenv(key)
+    def keys(self) -> Iterable[str]:
+        # best-effort; env keys only (without prefix expansion)
+        return os.environ.keys()
+
+class FileSource:
+    def __init__(self, path: os.PathLike[str] | str, optional: bool = False):
+        self.path = str(path)
+        self.optional = bool(optional)
+        self._cache: Dict[str, Any] = {}
+        self._load_once()
+
+    def _load_once(self):
+        p = pathlib.Path(self.path)
+        if not p.exists():
+            if self.optional:
+                self._cache = {}
+                return
+            raise FileNotFoundError(self.path)
+        text = p.read_text(encoding="utf-8")
+
+        # Try in order: JSON, INI, dotenv, YAML (if available)
+        # JSON
+        try:
+            data = json.loads(text)
+            self._cache = _flatten_obj(data)
+            return
+        except Exception:
+            pass
+        # INI
+        try:
+            cp = configparser.ConfigParser()
+            cp.read_string(text)
+            data = {s: dict(cp.items(s)) for s in cp.sections()}
+            # also root-level keys under DEFAULT
+            data.update(dict(cp.defaults()))
+            self._cache = _flatten_obj(data)
+            return
+        except Exception:
+            pass
+        # dotenv (simple KEY=VALUE per line)
+        try:
+            kv = {}
+            for line in text.splitlines():
+                line = line.strip()
+                if not line or line.startswith("#"):
+                    continue
+                if "=" in line:
+                    k, v = line.split("=", 1)
+                    kv[k.strip()] = _strip_quotes(v.strip())
+            self._cache = _flatten_obj(kv)
+            if self._cache:
+                return
+        except Exception:
+            pass
+        # YAML if available
+        try:
+            import yaml  # type: ignore
+            data = yaml.safe_load(text) or {}
+            self._cache = _flatten_obj(data)
+            return
+        except Exception:
+            # if everything fails, fall back to empty (optional) or raise
+            if self.optional:
+                self._cache = {}
+                return
+            raise ValueError(f"Unrecognized file format: {self.path}")
+
+    def get(self, key: str) -> Optional[str]:
+        v = self._cache.get(key)
+        return None if v is None else str(v)
+
+    def keys(self) -> Iterable[str]:
+        return self._cache.keys()
+
+# ---- Field specs (overrides) ----
+
+class FieldSpec:
+    __slots__ = ("sources", "keys", "default", "path_is_dot")
+    def __init__(self, *, sources: Tuple[str, ...], keys: Tuple[str, ...], default: Any, path_is_dot: bool):
+        self.sources = sources
+        self.keys = keys
+        self.default = default
+        self.path_is_dot = path_is_dot  # true when keys are dotted paths for structured files
+
+class _ValueSentinel:
+    def __getitem__(self, key_default: str | Tuple[str, Any], /):
+        if isinstance(key_default, tuple):
+            key, default = key_default
+        else:
+            key, default = key_default, MISSING
+        # default source order is env > file unless overridden in Value(...)
+        return _ValueFactory(key, default)
+Value = _ValueSentinel()
+
+class _ValueFactory:
+    def __init__(self, key: str, default: Any):
+        self.key = key
+        self.default = default
+    def __call__(self, *, sources: Tuple[str, ...] = ("env", "file")):
+        return FieldSpec(sources=tuple(sources), keys=(self.key,), default=self.default, path_is_dot=False)
+
+class _EnvSentinel:
+    def __getitem__(self, key_default: str | Tuple[str, Any], /):
+        key, default = (key_default if isinstance(key_default, tuple) else (key_default, MISSING))
+        return FieldSpec(sources=("env",), keys=(key,), default=default, path_is_dot=False)
+Env = _EnvSentinel()
+
+class _FileSentinel:
+    def __getitem__(self, key_default: str | Tuple[str, Any], /):
+        key, default = (key_default if isinstance(key_default, tuple) else (key_default, MISSING))
+        return FieldSpec(sources=("file",), keys=(key,), default=default, path_is_dot=False)
+File = _FileSentinel()
+
+class _PathSentinel:
+    class _FilePath:
+        def __getitem__(self, key_default: str | Tuple[str, Any], /):
+            key, default = (key_default if isinstance(key_default, tuple) else (key_default, MISSING))
+            return FieldSpec(sources=("file",), keys=(key,), default=default, path_is_dot=True)
+    file = _FilePath()
+Path = _PathSentinel()
+
+# ---- Class decorator ----
+
+def config_component(*, prefix: str = ""):
+    def dec(cls):
+        setattr(cls, _CONFIG_FLAG, True)
+        setattr(cls, _CONFIG_PREFIX, prefix or "")
+        if not hasattr(cls, _FIELD_META):
+            setattr(cls, _FIELD_META, {})
+        return cls
+    return dec
+
+def is_config_component(cls: type) -> bool:
+    return bool(getattr(cls, _CONFIG_FLAG, False))
+
+# ---- Registry / resolution ----
+
+class ConfigRegistry:
+    """Holds ordered sources and provides typed resolution for @config_component classes."""
+    def __init__(self, sources: Sequence[ConfigSource]):
+        self.sources = tuple(sources or ())
+
+    def resolve(self, keys: Iterable[str]) -> Optional[str]:
+        # try each key across sources in order
+        for key in keys:
+            for src in self.sources:
+                v = src.get(key)
+                if v is not None:
+                    return v
+        return None
+
+def register_field_spec(cls: type, name: str, spec: FieldSpec) -> None:
+    meta: Dict[str, FieldSpec] = getattr(cls, _FIELD_META, None) or {}
+    meta[name] = spec
+    setattr(cls, _FIELD_META, meta)
+
+def build_component_instance(cls: type, registry: ConfigRegistry) -> Any:
+    prefix = getattr(cls, _CONFIG_PREFIX, "")
+    overrides: Dict[str, FieldSpec] = getattr(cls, _FIELD_META, {}) or {}
+
+    if is_dataclass(cls):
+        kwargs = {}
+        for f in fields(cls):
+            name = f.name
+            spec = overrides.get(name)
+            if spec:
+                val = _resolve_with_spec(spec, registry)
+            else:
+                # auto: PREFIX+NAME or NAME (env), NAME (file)
+                val = registry.resolve((prefix + name.upper(), name.upper()))
+            if val is None and f.default is not MISSING:
+                val = f.default
+            elif val is None and f.default_factory is not MISSING:  # type: ignore
+                val = f.default_factory()  # type: ignore
+            if val is None and f.default is MISSING and getattr(f, "default_factory", MISSING) is MISSING:  # type: ignore
+                raise NameError(f"Missing config for field {cls.__name__}.{name}")
+            kwargs[name] = _coerce_type(val, f.type)
+        return cls(**kwargs)
+
+    # Non-dataclass: inspect __init__ signature
+    import inspect
+    sig = inspect.signature(cls.__init__)
+    hints = _get_type_hints_safe(cls.__init__, owner=cls)
+    kwargs = {}
+    for pname, par in sig.parameters.items():
+        if pname == "self" or par.kind in (inspect.Parameter.VAR_POSITIONAL, inspect.Parameter.VAR_KEYWORD):
+            continue
+        ann = hints.get(pname, par.annotation)
+        spec = overrides.get(pname)
+        if spec:
+            val = _resolve_with_spec(spec, registry)
+        else:
+            val = registry.resolve((prefix + pname.upper(), pname.upper()))
+        if val is None and par.default is not inspect._empty:
+            val = par.default
+        if val is None and par.default is inspect._empty:
+            raise NameError(f"Missing config for field {cls.__name__}.{pname}")
+        kwargs[pname] = _coerce_type(val, ann)
+    return cls(**kwargs)
+
+# ---- helpers ----
+
+def _resolve_with_spec(spec: FieldSpec, registry: ConfigRegistry) -> Any:
+    # respect spec.sources ordering, but try all keys for each source
+    for src_kind in spec.sources:
+        if src_kind == "env":
+            v = _resolve_from_sources(registry, spec.keys, predicate=lambda s: isinstance(s, EnvSource))
+        elif src_kind == "file":
+            if spec.path_is_dot:
+                v = _resolve_path_from_files(registry, spec.keys)
+            else:
+                v = _resolve_from_sources(registry, spec.keys, predicate=lambda s: isinstance(s, FileSource))
+        else:
+            v = None
+        if v is not None:
+            return v
+    return None if spec.default is MISSING else spec.default
+
+def _resolve_from_sources(registry: ConfigRegistry, keys: Tuple[str, ...], predicate: Callable[[ConfigSource], bool]) -> Optional[str]:
+    for key in keys:
+        for src in registry.sources:
+            if predicate(src):
+                v = src.get(key)
+                if v is not None:
+                    return v
+    return None
+
+def _resolve_path_from_files(registry: ConfigRegistry, dotted_keys: Tuple[str, ...]) -> Optional[str]:
+    for key in dotted_keys:
+        for src in registry.sources:
+            if isinstance(src, FileSource):
+                # FileSource caches a flattened dict already
+                v = src.get(key)
+                if v is not None:
+                    return v
+    return None
+
+def _flatten_obj(obj: Any, prefix: str = "") -> Dict[str, Any]:
+    out: Dict[str, Any] = {}
+    if isinstance(obj, dict):
+        for k, v in obj.items():
+            k2 = (prefix + "." + str(k)) if prefix else str(k)
+            out.update(_flatten_obj(v, k2))
+    elif isinstance(obj, (list, tuple)):
+        for i, v in enumerate(obj):
+            k2 = (prefix + "." + str(i)) if prefix else str(i)
+            out.update(_flatten_obj(v, k2))
+    else:
+        out[prefix] = obj
+        # keep dotted leaf keys as-is; additionally mirror top-level simple keys as UPPERCASE for convenience
+        if prefix and "." not in prefix:
+            out[prefix.upper()] = obj
+    return out
+
+def _strip_quotes(s: str) -> str:
+    if (s.startswith('"') and s.endswith('"')) or (s.startswith("'") and s.endswith("'")):
+        return s[1:-1]
+    return s
+
+def _coerce_type(val: Any, ann: Any) -> Any:
+    if val is None:
+        return None
+    # strings from sources come as str; coerce to basic types
+    try:
+        from typing import get_origin, get_args
+        origin = get_origin(ann) or ann
+        if origin in (int,):
+            return int(val)
+        if origin in (float,):
+            return float(val)
+        if origin in (bool,):
+            s = str(val).strip().lower()
+            if s in ("1", "true", "yes", "y", "on"): return True
+            if s in ("0", "false", "no", "n", "off"): return False
+            return bool(val)
+    except Exception:
+        pass
+    return val
+
+def _get_type_hints_safe(fn, owner=None):
+    try:
+        import inspect
+        mod = inspect.getmodule(fn)
+        g = getattr(mod, "__dict__", {})
+        l = vars(owner) if owner is not None else None
+        from typing import get_type_hints
+        return get_type_hints(fn, globalns=g, localns=l, include_extras=True)
+    except Exception:
+        return {}
+
+# ---- Public API helpers to be imported by users ----
+
+__all__ = [
+    "config_component", "EnvSource", "FileSource",
+    "Env", "File", "Path", "Value",
+    "ConfigRegistry", "register_field_spec", "is_config_component",
+]
+
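To make the new config module concrete, a hedged sketch of how its pieces could fit together follows. `AppSettings`, the `APP_` prefix, and `settings.json` are illustrative assumptions; only the decorator, the sources, the registry, and `build_component_instance` come from the file above.

from dataclasses import dataclass
from pico_ioc.config import config_component, ConfigRegistry, EnvSource, FileSource
from pico_ioc.config import build_component_instance  # not listed in __all__, imported directly

@config_component(prefix="APP_")   # auto keys per field: APP_HOST then HOST, APP_PORT then PORT
@dataclass
class AppSettings:                 # hypothetical consumer class
    host: str = "localhost"
    port: int = 8080

# env wins over the file because each key is tried across the sources in order
registry = ConfigRegistry([EnvSource(), FileSource("settings.json", optional=True)])
settings = build_component_instance(AppSettings, registry)  # string values are coerced to the annotated types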