plugantic 0.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
plugantic/__init__.py ADDED
@@ -0,0 +1,3 @@
1
+ """Plugantic: Simplify extendable composition with Pydantic."""
2
+ from ._consts import version as __version__
3
+ from .plugin import PluginModel, PluginDowncastHandler
plugantic/_consts.py ADDED
@@ -0,0 +1,2 @@
1
+ # Single source of truth for information needed at runtime and compile-time (i.e. version)
2
+ version = "0.1.0"
plugantic/_helpers.py ADDED
@@ -0,0 +1,42 @@
1
+ from __future__ import annotations
2
+
3
+ from typing_extensions import TypeVar, Iterable, TypeGuard, TypeAliasType, Callable
4
+ from itertools import combinations, product
5
+
6
+ T = TypeVar("T")
7
+ RecursiveList = TypeAliasType("RecursiveList", Iterable["RecursiveListItem[T]"], type_params=(T,))
8
+ RecursiveListItem = TypeAliasType("RecursiveListItem", T | RecursiveList[T], type_params=(T,))
9
+
10
+ def recursive_linear(items: RecursiveList[T], typeguard: Callable[[RecursiveListItem[T]], TypeGuard[T]], join: Callable[[Iterable[T]], T]) -> Iterable[T]:
11
+ result = []
12
+ for item in items:
13
+ if typeguard(item):
14
+ result.append(item)
15
+ else:
16
+ result.extend(recursive_powerset(item, typeguard, join))
17
+ return result
18
+
19
+ def recursive_powerset(items: RecursiveList[T], typeguard: Callable[[RecursiveListItem[T]], TypeGuard[T]], join: Callable[[Iterable[T]], T]) -> Iterable[T]:
20
+ arbitrary_subset = []
21
+ subsets = []
22
+ for downcast in items:
23
+ if typeguard(downcast):
24
+ arbitrary_subset.append(downcast)
25
+ else:
26
+ linear_subset = recursive_linear(downcast, typeguard, join)
27
+ linear_subset = ((), *((d,) for d in linear_subset))
28
+ subsets.append(linear_subset)
29
+
30
+ arbitrary_powerset = [()]
31
+ for l in range(1, len(arbitrary_subset) + 1):
32
+ arbitrary_powerset.extend(combinations(arbitrary_subset, l))
33
+ subsets.append(arbitrary_powerset)
34
+
35
+ powerset = []
36
+ for subset in product(*subsets):
37
+ callbacks = [c for s in subset for c in s]
38
+ if not callbacks:
39
+ continue
40
+ powerset.append(join(callbacks))
41
+
42
+ return powerset
plugantic/_types.py ADDED
@@ -0,0 +1,6 @@
1
+ class _InjectBase:
2
+ def __init__(self, *bases: type):
3
+ self._bases = bases
4
+ def __mro_entries__(self, bases):
5
+ return tuple(base for base in self._bases if base not in bases)
6
+ _VanishBase = _InjectBase()
plugantic/plugin.py ADDED
@@ -0,0 +1,409 @@
1
+ from __future__ import annotations
2
+
3
+ from typing_extensions import ClassVar, Iterable, Type, Union, Self, Literal, Any, Callable, Generic, TypeVar, TypeVarTuple, Unpack, TypeAliasType, TypedDict, get_origin, get_args, TYPE_CHECKING
4
+ from pydantic import BaseModel, GetCoreSchemaHandler, model_validator
5
+ from pydantic_core.core_schema import tagged_union_schema
6
+
7
+ from ._helpers import recursive_powerset, recursive_linear, RecursiveList
8
+ from ._types import _VanishBase
9
+
10
+ Ts = TypeVarTuple("Ts")
11
+
12
+ if TYPE_CHECKING:
13
+ _plugin_base = Generic[Unpack[Ts]]
14
+ else:
15
+ _plugin_base = _VanishBase
16
+
17
+ class PluganticConfig(TypedDict):
18
+ varname_type: str|None = None
19
+ value: str|None = None
20
+ supported_features: set[Any]|tuple[Any, ...]=()
21
+ required_features: _RequiresFeatureSpec|None=None
22
+ auto_downcasts: PluginDowncastCallbacks|None=None
23
+
24
+ class PluginModel(BaseModel, _plugin_base):
25
+ __plugantic_varname_type__: ClassVar[str] = "type"
26
+ __plugantic_supported_features__: ClassVar[set[Any]] = set()
27
+ __plugantic_required_features__: ClassVar[_RequiresFeatureSpec|None] = None
28
+ __plugantic_inherit_features__: ClassVar[bool] = True
29
+ __plugantic_generic_supertype__: ClassVar[type|None] = None
30
+ __plugantic_auto_downcasts__: ClassVar[set[Self]|None] = None
31
+ __plugantic_auto_downcast_callbacks__: ClassVar[PluginDowncastCallbacks|None] = None
32
+ __plugantic_was_schema_created__: ClassVar[bool] = False
33
+ __plugantic_check_schema_usage__: ClassVar[bool] = True
34
+
35
+ plugantic_config: ClassVar[PluganticConfig|None] = None
36
+
37
+ def __init__(self, *args, **kwargs):
38
+ declared_type = self._get_declared_type()
39
+ if declared_type:
40
+ kwargs = {
41
+ self.__plugantic_varname_type__: declared_type,
42
+ **kwargs
43
+ }
44
+ super().__init__(*args, **kwargs)
45
+
46
+ def __class_getitem__(cls, args):
47
+ if not isinstance(args, tuple):
48
+ args = (args,)
49
+ requires_features, supports_features = cls._unpack_features(*args)
50
+ return type(cls.__name__, (cls,), {}, supported_features=supports_features, required_features=requires_features, _plugantic_generic_supertype=cls)
51
+
52
+ def __init_subclass__(cls, *,
53
+ varname_type: str|None=None,
54
+ value: str|None=None,
55
+ supported_features: set[Any]|tuple[Any, ...]=(),
56
+ required_features: _RequiresFeatureSpec|None=None,
57
+ auto_downcasts: PluginDowncastCallbacks|None=None,
58
+ _plugantic_generic_supertype: type|None=None,
59
+ _plugantic_downcast_callback: SimplePluginDowncastCallback|None=None,
60
+ **kwargs):
61
+ if cls._check_plugantic_schema_usage():
62
+ raise ValueError(f"Schema of {cls.__name__} has already been created. Creating new subclasses after the schema has been created will lead to undefined behaviour.")
63
+
64
+ super().__init_subclass__(**kwargs)
65
+
66
+ if cls.plugantic_config:
67
+ varname_type = cls.plugantic_config.get("varname_type", None) or varname_type
68
+ value = cls.plugantic_config.get("value", None) or value
69
+ supported_features = cls.plugantic_config.get("supported_features", ()) or supported_features
70
+ required_features = cls.plugantic_config.get("required_features", None) or required_features
71
+ auto_downcasts = cls.plugantic_config.get("auto_downcasts", None) or auto_downcasts
72
+
73
+ cls.__plugantic_was_schema_created__ = False
74
+
75
+ cls.__plugantic_generic_supertype__ = _plugantic_generic_supertype
76
+ cls.__plugantic_required_features__ = required_features
77
+ cls.__plugantic_auto_downcasts__ = None
78
+
79
+ supported_features = set(supported_features)
80
+ if cls.__plugantic_inherit_features__:
81
+ supported_features = cls.__plugantic_supported_features__ | supported_features
82
+
83
+ cls.__plugantic_supported_features__ = supported_features
84
+
85
+ if varname_type is not None:
86
+ cls.__plugantic_varname_type__ = varname_type
87
+
88
+ if value is not None:
89
+ cls._create_annotation(cls.__plugantic_varname_type__, Literal[value])
90
+
91
+ cls._ensure_varname_default()
92
+
93
+ if _plugantic_downcast_callback:
94
+ _plugantic_downcast_callback(PluginDowncastHandler(cls))
95
+
96
+ cls.__plugantic_auto_downcast_callbacks__ = auto_downcasts
97
+
98
+ @classmethod
99
+ def _ensure_downcasts(cls):
100
+ if not cls.__plugantic_auto_downcast_callbacks__:
101
+ return
102
+
103
+ callbacks = cls.__plugantic_auto_downcast_callbacks__
104
+ cls.__plugantic_auto_downcast_callbacks__ = None
105
+
106
+ cls.__plugantic_auto_downcasts__ = cls._create_downcasts(callbacks)
107
+
108
+ @classmethod
109
+ def _create_downcasts(cls, downcast_callbacks: PluginDowncastCallbacks):
110
+ downcasts = set()
111
+ for callback in cls._create_powerset_downcast_callbacks(downcast_callbacks):
112
+ subcls = type(cls.__name__, (cls,), {}, _plugantic_downcast_callback=callback)
113
+ downcasts.add(subcls)
114
+ return downcasts
115
+
116
+ @classmethod
117
+ def _create_linear_downcast_callbacks(cls, downcasts: PluginDowncastCallbacks):
118
+ return recursive_linear(downcasts, callable, cls._create_joined_downcast_callback)
119
+
120
+ @classmethod
121
+ def _create_powerset_downcast_callbacks(cls, downcasts: PluginDowncastCallbacks):
122
+ return recursive_powerset(downcasts, callable, cls._create_joined_downcast_callback)
123
+
124
+ @classmethod
125
+ def _create_joined_downcast_callback(cls, downcasts: PluginDowncastCallbacks) -> SimplePluginDowncastCallback:
126
+ def callback(handler: PluginDowncastHandler):
127
+ for downcast in downcasts:
128
+ downcast(handler)
129
+
130
+ return callback
131
+
132
+ @classmethod
133
+ def _unpack_features(cls, *features: Any) -> tuple[_RequiresFeatureSpec, set[Any]]:
134
+ requires_all_features = set()
135
+ supports_features = set()
136
+
137
+ for feature in features:
138
+ if isinstance(feature, _RequiresFeatureSpec):
139
+ requires_all_features.add(feature)
140
+ continue
141
+
142
+ _any_features = set()
143
+ if get_origin(feature) is Union:
144
+ for sub_feature in get_args(feature):
145
+ _any_features.update(cls._unwrap_feature(sub_feature))
146
+ if get_origin(feature) is Literal:
147
+ _any_features.update(get_args(feature))
148
+
149
+ if len(_any_features) == 1:
150
+ feature = _any_features.pop()
151
+
152
+ if _any_features:
153
+ requires_all_features.add(_RequiresAnyFeature(any_of=_any_features))
154
+ supports_features.update(_any_features)
155
+ continue
156
+
157
+ requires_all_features.add(feature)
158
+ supports_features.add(feature)
159
+
160
+ return _RequiresAllFeatures(all_of=requires_all_features), supports_features
161
+
162
+ @classmethod
163
+ def _unwrap_feature(cls, feature: Any) -> tuple[Any]:
164
+ if get_origin(feature) is Literal:
165
+ return get_args(feature)
166
+ return (feature,)
167
+
168
+ @classmethod
169
+ def _create_subclass(cls):
170
+ return type(cls.__name__, (cls,), {})
171
+
172
+ @classmethod
173
+ def _create_annotation(cls, name: str, value: Any):
174
+ """
175
+ Create an annotation of value for the given name as a member variable of the class
176
+ e.g. name="type" value=Literal["test"] -> `type: Literal["test"]`
177
+ """
178
+ if not hasattr(cls, "__annotations__"):
179
+ cls.__annotations__ = {}
180
+ if not cls.__annotations__.get(name, None):
181
+ cls.__annotations__[name] = value
182
+
183
+ _NoValue = object()
184
+ @classmethod
185
+ def _create_field_default(cls, name: str, value: Any):
186
+ actual_value = getattr(cls, name, cls._NoValue)
187
+ if actual_value == value:
188
+ return
189
+ setattr(cls, name, value)
190
+
191
+ @classmethod
192
+ def _ensure_varname_default(cls):
193
+ """
194
+ Ensure that the discriminator name is associated with a value so that creating a direct instance does not require passing the value again
195
+ e.g.:
196
+ class SomeConfig(PluginModel):
197
+ type: Literal["something"] # will be transformed to the equivalent of `type: Literal["something"] = "something"`
198
+
199
+ SomeConfig() # works, because there is a default value set
200
+ SomeConfig(type="something") # works
201
+ SomeConfig(type="else") # fails
202
+ """
203
+ declared_type = cls._get_declared_type()
204
+ if not declared_type:
205
+ return
206
+ cls._create_field_default(cls.__plugantic_varname_type__, declared_type)
207
+
208
+ @classmethod
209
+ def _get_declared_type(cls) -> str|None:
210
+ """Get the value declared for the discriminator name (e.g. `type: Literal["something"]` -> "something")"""
211
+ field = getattr(cls, "__annotations__", {}).get(cls.__plugantic_varname_type__, None)
212
+
213
+ if (not field) and hasattr(cls, "model_fields"):
214
+ field = cls.model_fields.get(cls.__plugantic_varname_type__, None)
215
+ if field:
216
+ field = field.annotation
217
+
218
+ if get_origin(field) is Literal:
219
+ return get_args(field)[0]
220
+
221
+ return None
222
+
223
+ @classmethod
224
+ def _supports_features(cls, features: _RequiresFeatureSpec|None) -> bool:
225
+ if features is None:
226
+ return True
227
+
228
+ return features.applies_to(cls.__plugantic_supported_features__)
229
+
230
+ @classmethod
231
+ def _is_valid_subclass(cls, filter: _PluginFeatureFilter) -> bool:
232
+ if cls.__plugantic_generic_supertype__:
233
+ return False
234
+
235
+ if not cls._supports_features(filter.required_features):
236
+ return False
237
+
238
+ if cls._get_declared_type():
239
+ return True
240
+
241
+ return False
242
+
243
+ @classmethod
244
+ def _select_optimal_subclass(cls, subclasses: Iterable[Self], filter: _PluginFeatureFilter) -> Self|None:
245
+ """
246
+ Select the optimal subclass from a list of subclasses given a feature filter
247
+
248
+ This is primarily used to select the least restrictive subclass for automatic downcasting
249
+ We assume that the subclass with the fewest changes (i.e. with the fewest added features) is the least restrictive.
250
+ Thus, the optimal subclass is the one that has the fewest overall features.
251
+ In the future this might be changed to the subclass with the fewest differences between the subclass and the base class.
252
+ """
253
+ optimal = None
254
+ for subcls in subclasses:
255
+ if optimal is None:
256
+ optimal = subcls
257
+
258
+ if len(subcls.__plugantic_supported_features__) < len(optimal.__plugantic_supported_features__):
259
+ optimal = subcls
260
+
261
+ return optimal
262
+
263
+ @classmethod
264
+ def _get_all_subclasses(cls):
265
+ cls._ensure_downcasts()
266
+ if not cls.__plugantic_auto_downcasts__:
267
+ return cls.__subclasses__()
268
+
269
+ return [subcls for subcls in cls.__subclasses__() if not subcls in cls.__plugantic_auto_downcasts__]
270
+
271
+ @classmethod
272
+ def _get_valid_self_class(cls, filter: _PluginFeatureFilter) -> Type[Self]|None:
273
+ if cls._is_valid_subclass(filter):
274
+ return cls
275
+
276
+ cls._ensure_downcasts()
277
+ if not cls.__plugantic_auto_downcasts__:
278
+ return None
279
+
280
+ return cls._select_optimal_subclass(cls._get_valid_downcast_subclasses(filter), filter)
281
+
282
+ @classmethod
283
+ def _get_valid_downcast_subclasses(cls, filter: _PluginFeatureFilter) -> Iterable[Type[Self]]:
284
+ return [subcls for subcls in cls.__plugantic_auto_downcasts__ if subcls._is_valid_subclass(filter)]
285
+
286
+ @classmethod
287
+ def _get_valid_subclasses(cls, filter: _PluginFeatureFilter) -> Iterable[Type[Self]]:
288
+ valid = []
289
+
290
+ valid_self_class = cls._get_valid_self_class(filter)
291
+ if valid_self_class:
292
+ valid.append(valid_self_class)
293
+
294
+ for subcls in cls._get_all_subclasses():
295
+ valid.extend(subcls._get_valid_subclasses(filter))
296
+
297
+ return valid
298
+
299
+ @classmethod
300
+ def _as_tagged_union(cls, handler: GetCoreSchemaHandler, filter: _PluginFeatureFilter):
301
+ subclasses = set(cls._get_valid_subclasses(filter))
302
+ if len(subclasses) == 1:
303
+ return handler(subclasses.pop())
304
+
305
+ choices = {subcls._get_declared_type(): handler(subcls) for subcls in subclasses}
306
+ return tagged_union_schema(choices, discriminator=cls.__plugantic_varname_type__)
307
+
308
+ @classmethod
309
+ def __get_pydantic_core_schema__(cls, source, handler: GetCoreSchemaHandler):
310
+ _required_features = None
311
+ _base = cls
312
+
313
+ if cls.__plugantic_generic_supertype__:
314
+ _required_features = cls.__plugantic_required_features__
315
+ _base = cls.__plugantic_generic_supertype__
316
+
317
+ _filter = _PluginFeatureFilter(required_features=_required_features)
318
+
319
+ cls.__plugantic_was_schema_created__ = True
320
+
321
+ return _base._as_tagged_union(handler, _filter)
322
+
323
+ @classmethod
324
+ def _check_plugantic_schema_usage(cls) -> bool:
325
+ """
326
+ Return True if the schema of this class or any of its superclasses has been created
327
+ This check can be circumvented by setting __plugantic_check_schema_usage__ to False
328
+ """
329
+ if not cls.__plugantic_check_schema_usage__:
330
+ return False
331
+ for supcls in cls.mro():
332
+ if not issubclass(supcls, PluginModel):
333
+ continue
334
+ if supcls.__plugantic_was_schema_created__:
335
+ return True
336
+ return False
337
+
338
+ @model_validator(mode="wrap")
339
+ def _try_downcast(cls, data, handler):
340
+ if isinstance(data, cls):
341
+ pass
342
+ elif cls.__plugantic_generic_supertype__ and isinstance(data, cls.__plugantic_generic_supertype__):
343
+ try:
344
+ data = cls(**data.model_dump())
345
+ except Exception as e:
346
+ raise ValueError(f"Failed to downcast given {repr(data)} to required {cls.__name__}; please provide the required config directly") from e
347
+ return handler(data)
348
+
349
+ model_config = {"defer_build": True}
350
+
351
+ class _RequiresFeatureSpec:
352
+ def applies_to(self, supported_features: set[Any]) -> bool:
353
+ ...
354
+
355
+ def _split_features(self, features: set[Any]) -> tuple[set[Any], set[Self]]:
356
+ feats, specs = set(), set()
357
+ for feature in features:
358
+ if isinstance(feature, _RequiresFeatureSpec):
359
+ specs.add(feature)
360
+ else:
361
+ feats.add(feature)
362
+ return feats, specs
363
+
364
+ class _RequiresAnyFeature(_RequiresFeatureSpec):
365
+ def __init__(self, *, any_of: set[Any]):
366
+ self.any_of_features, self.any_of_specs = self._split_features(any_of)
367
+
368
+ def applies_to(self, supported_features) -> bool:
369
+ return (not self.any_of_features.isdisjoint(supported_features)) or any(spec.applies_to(supported_features) for spec in self.any_of_specs)
370
+
371
+ class _RequiresAllFeatures(_RequiresFeatureSpec):
372
+ def __init__(self, *, all_of: set[Any]):
373
+ self.all_of_features, self.all_of_specs = self._split_features(all_of)
374
+
375
+ def applies_to(self, supported_features) -> bool:
376
+ return self.all_of_features.issubset(supported_features) and all(spec.applies_to(supported_features) for spec in self.all_of_specs)
377
+
378
+ class _PluginFeatureFilter:
379
+ def __init__(self, *, required_features: _RequiresFeatureSpec|None=None):
380
+ self.required_features = required_features
381
+
382
+ T = TypeVar("T", bound=PluginModel)
383
+
384
+ class PluginDowncastHandler(Generic[T]):
385
+ def __init__(self, wraps: Type[T]):
386
+ self.wraps = wraps
387
+
388
+ def get_raw_type(self) -> Type[T]:
389
+ return self.wraps
390
+
391
+ def enable_feature(self, feature: Any):
392
+ features = self.wraps._unwrap_feature(feature)
393
+ self.wraps.__plugantic_supported_features__.update(features)
394
+
395
+ def disable_feature(self, feature: Any):
396
+ for item in self.wraps._unwrap_feature(feature):
397
+ self.wraps.__plugantic_supported_features__.discard(item)
398
+
399
+ def set_field_annotation(self, name: str, annotation: Type):
400
+ self.wraps._create_annotation(name, annotation)
401
+
402
+ def set_field_default(self, name: str, value: Any):
403
+ self.wraps._create_field_default(name, value)
404
+
405
+ def remove_field_default(self, name: str):
406
+ self.set_field_default(name, ...)
407
+
408
+ SimplePluginDowncastCallback = TypeAliasType("SimplePluginDowncastCallback", Callable[[PluginDowncastHandler[T]], None], type_params=(T,))
409
+ PluginDowncastCallbacks = TypeAliasType("PluginDowncastCallbacks", RecursiveList[SimplePluginDowncastCallback[T]], type_params=(T,))
@@ -0,0 +1,259 @@
1
+ Metadata-Version: 2.4
2
+ Name: plugantic
3
+ Version: 0.1.0
4
+ Summary: Simplified extendable composition with pydantic
5
+ Author-email: Martin Kunze <martin@martinkunze.com>
6
+ Project-URL: Homepage, https://github.com/martinkunze/plugantic
7
+ Project-URL: Documentation, https://github.com/martinkunze/plugantic/blob/main/README.md
8
+ Project-URL: Source, https://github.com/martinkunze/plugantic
9
+ Requires-Python: >=3.8
10
+ Description-Content-Type: text/markdown
11
+ License-File: LICENSE
12
+ Requires-Dist: pydantic
13
+ Requires-Dist: propert
14
+ Requires-Dist: typing-extensions
15
+ Dynamic: license-file
16
+
17
+ # 🧩 Plugantic - Simplified extendable composition with pydantic
18
+
19
+ ## πŸ€” Why use `plugantic`?
20
+
21
+ You may have learned that you should avoid inheritance in favor of composition. When using pydantic you can achieve that by using something like the following:
22
+
23
+ ```python
24
+ # Declare a base config
25
+ class OutputConfig(BaseModel):
26
+ mode: str
27
+ def print(self): ...
28
+
29
+ # Declare all implementations of the base config
30
+ class TextConfig(OutputConfig):
31
+ mode: Literal["text"] = "text"
32
+ text: str
33
+ def print(self):
34
+ print(self.text)
35
+
36
+ class NumberConfig(OutputConfig):
37
+ mode: Literal["number"] = "number"
38
+ number: float
39
+ precision: int = 2
40
+ def print(self):
41
+ print(f"{self.number:.{self.precision}f}")
42
+
43
+ # Define a union type of all implementations
44
+ AllOutputConfigs = Annotated[Union[
45
+ TextConfig,
46
+ NumberConfig,
47
+ ], Field(discriminator="mode")]
48
+
49
+ # Use the union type in your model
50
+ class CommonConfig(BaseModel):
51
+ output: AllOutputConfigs
52
+
53
+ ...
54
+
55
+ CommonConfig.model_validate({"output": {
56
+ "mode": "text",
57
+ "text": "Hello World"
58
+ }})
59
+ ```
60
+
61
+ Whilst this works, there are multiple issues and annoyances with that approach:
62
+ - **Hard to maintain**: you need to declare a type union and update it with every change
63
+ - **Not extensible**: adding a different config afterwards would require updating the `AllOutputConfigs` type and all of the objects using it
64
+ - **Redundant definition** of the discriminator field (i.e. `Literal[<x>] = <x>`)
65
+
66
+ This library solves all of these issues (and more), so you can just write
67
+
68
+ ```python
69
+ from plugantic import PluginModel
70
+
71
+ class OutputConfig(PluginModel):
72
+ mode: str
73
+ def print(self): ...
74
+
75
+ class TextConfig(OutputConfig):
76
+ # No redundant "text" definition here!
77
+ mode: Literal["text"]
78
+ text: str
79
+ def print(self):
80
+ print(self.text)
81
+
82
+ class NumberConfig(OutputConfig):
83
+ # No redundant definition here either!
84
+ mode: Literal["number"]
85
+ number: float
86
+ precision: int = 2
87
+ def print(self):
88
+ print(f"{self.number:.{self.precision}f}")
89
+
90
+ # No need to define a union type or a discriminator field!
91
+ # You can just use the base type as a field type!
92
+ class CommonConfig(BaseModel):
93
+ output: OutputConfig
94
+
95
+ # You can even add new configs after the fact!
96
+ class BytesConfig(OutputConfig):
97
+ mode: Literal["bytes"]
98
+ content: bytes
99
+ def print(self):
100
+ print(self.content.decode("utf-8"))
101
+
102
+ ...
103
+
104
+ # The actual type is only evaluated when it is actually needed!
105
+ CommonConfig.model_validate({"output": {
106
+ "mode": "text",
107
+ "text": "Hello World"
108
+ }})
109
+ ```
110
+
111
+ ## ✨ Features
112
+
113
+ ### πŸŒ€ Automatic Downcasts
114
+
115
+ Let's say you have the following logger:
116
+
117
+ ```python
118
+ FeatureNewPage = Literal["newline"]
119
+
120
+ class LoggerBase(PluginModel):
121
+ def log_line(self, line: str, new_page: bool=False): ...
122
+
123
+ class LoggerStdout(LoggerBase, value="stdout"):
124
+ new_page_token: str|None = None
125
+ def log_line(self, line: str, new_page: bool=False):
126
+ if new_page:
127
+ if not self.new_page_token:
128
+ raise ValueError("new_page_token is not set")
129
+ print(self.new_page_token)
130
+ print(line)
131
+
132
+ class Component1(BaseModel):
133
+ logger: LoggerBase
134
+
135
+ class Component2(BaseModel):
136
+ logger: LoggerBase[FeatureNewPage]
137
+ ```
138
+
139
+ then users could not use `Component2` with `LoggerStdout` as it does not support the `FeatureNewPage` feature, even though `LoggerStdout` would support it if `new_page_token: str` were enforced.
140
+
141
+ Conventionally, this would require the developer to create two classes (i.e. `LoggerStdout` and `LoggerStdoutNewPage`) and then include either one in the final annotated union depending on if the component requires the new page functionality.
142
+
143
+ With `plugantic`, you can automatically create subtypes that are more strict than the base type and they will be automatically validated and downcast when using the model:
144
+
145
+ ```python
146
+ def ensure_new_page_feature(handler: PluginDowncastHandler):
147
+ handler.enable_feature(FeatureNewPage)
148
+ handler.set_field_annotation("new_page_token", str)
149
+ handler.remove_field_default("new_page_token")
150
+
151
+ class LoggerBase(PluginModel, value="stdout", auto_downcasts=(ensure_new_page_feature,)):
152
+ new_page_token: str|None = None
153
+ def log_line(self, line: str, new_page: bool=False):
154
+ if new_page:
155
+ if not self.new_page_token:
156
+ raise ValueError("new_page_token is not set")
157
+ print(self.new_page_token)
158
+ print(line)
159
+ ```
160
+
161
+ By declaring multiple callbacks in `auto_downcasts`, you can create a superset of all possible downcasts and `plugantic` will automatically select the least strict depending on which features you require.
162
+
163
+
164
+ ### πŸ”Œ Extensibility
165
+
166
+ You can add new plugins after the fact!
167
+
168
+ To do so, you will have to ensure one of the following prerequisites:
169
+
170
+ **1. Use `ForwardRef`s**
171
+
172
+ ```python
173
+ from __future__ import annotations # either by importing annotations from the __future__ package
174
+
175
+ class BaseConfig(PluginModel):
176
+ ...
177
+
178
+ ...
179
+
180
+ class CommonConfig1(BaseModel):
181
+ config: BaseConfig
182
+
183
+ class CommonConfig2(BaseModel):
184
+ config: "BaseConfig" # or by using a string as the type annotation
185
+
186
+
187
+ class NumberConfig(BaseConfig): # now you can declare new types after the fact (but before using/validating the models)!
188
+ ...
189
+ ```
190
+
191
+ **2. Enable `defer_build`**
192
+
193
+ ```python
194
+ class BaseConfig(PluginModel):
195
+ ...
196
+
197
+ class CommonConfig(BaseModel):
198
+ config: BaseConfig
199
+
200
+ model_config = {"defer_build": True}
201
+ ```
202
+
203
+ ### πŸ“ Type Checker Friendliness
204
+
205
+ The type checker can infer the type of the plugin model, so you don't need to define a union type or a discriminator field!
206
+ Everything except for the annotated union is based on pydantic and as such can be used like before as type checkers are already familiar with pydantic.
207
+
208
+ ## πŸ›οΈ Leading Principles
209
+
210
+ ### Composition over Inheritance
211
+
212
+ Composition is preferred over inheritance.
213
+
214
+ ### Don't repeat yourself (DRY)
215
+
216
+ Having to inherit from a base class just to then declare an annotated union or having to declare a discriminator field both as an annotation and with a default being the same as the annotation is a violation of the DRY principle. This library tackles all of these issues at once.
217
+
218
+ ### Be conservative in what you send and liberal in what you accept
219
+
220
+ Using automatic downcasts, this library allows developers to accept every possible value when validating a model.
221
+
222
+
223
+ ## πŸ’» Development
224
+
225
+ ### πŸ“ Code structure
226
+
227
+ The code is structured as follows:
228
+
229
+ - `src/plugantic/` contains the source code
230
+ - `tests/` contains the tests
231
+
232
+ Most of the actual logic is in the `src/plugantic/plugin.py` file.
233
+
234
+ ### πŸ“¦ Distribution
235
+
236
+ To build the package, you can do the following:
237
+
238
+ ```bash
239
+ uv run build
240
+ ```
241
+
242
+ <details>
243
+ <summary>Publishing</summary>
244
+
245
+ > πŸ’‘ This section is primarily relevant for the maintainers of this package (me), as it requires permission to push a package to the `plugantic` repository on PyPI.
246
+
247
+ ```bash
248
+ uv run publish --token <token>
249
+ ```
250
+
251
+ </details>
252
+
253
+ ### 🎯 Tests
254
+
255
+ To run all tests, you can do the following:
256
+
257
+ ```bash
258
+ uv run pytest
259
+ ```
@@ -0,0 +1,10 @@
1
+ plugantic/__init__.py,sha256=MTKfwiTeL6NXzc5BFXzOW-9h1g3JdMvtv-fWjvar82g,163
2
+ plugantic/_consts.py,sha256=j2iVE7WWaZT7gutn9Fe2HNKSOjXI-X-bpvhvoZaoi4w,109
3
+ plugantic/_helpers.py,sha256=YQVGLjpQ-WhIFKt_sCKIyo1LOOqeZYj9TNy5y6xwkHE,1636
4
+ plugantic/_types.py,sha256=a16kLslWFNrVUpON_GVG4iJA-Mr_GeCQre1TAlvKiqM,223
5
+ plugantic/plugin.py,sha256=6ZM5zK2FMqHjREcgcETC3Cx_ep1sFR1SLzYpZwBmn70,16461
6
+ plugantic-0.1.0.dist-info/licenses/LICENSE,sha256=5E8cRbGFBw4uIbIkiaxH_LFrI-_4nsn_YElk0LMkiL8,1060
7
+ plugantic-0.1.0.dist-info/METADATA,sha256=RPkxeT0JzDlAOQDM8tUehPIYAlT-AQTU0RjIsw_kBp0,7614
8
+ plugantic-0.1.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
9
+ plugantic-0.1.0.dist-info/top_level.txt,sha256=xiHgfBTODu2aghczK5ZLmAvc1ZPLqIWXx0H0NU4Rr7A,10
10
+ plugantic-0.1.0.dist-info/RECORD,,
@@ -0,0 +1,5 @@
1
+ Wheel-Version: 1.0
2
+ Generator: setuptools (80.9.0)
3
+ Root-Is-Purelib: true
4
+ Tag: py3-none-any
5
+
@@ -0,0 +1,7 @@
1
+ Copyright 2025 Martin Kunze
2
+
3
+ Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the β€œSoftware”), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
4
+
5
+ The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
6
+
7
+ THE SOFTWARE IS PROVIDED β€œAS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
@@ -0,0 +1 @@
1
+ plugantic