weakincentives 0.9.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (73) hide show
  1. weakincentives/__init__.py +67 -0
  2. weakincentives/adapters/__init__.py +37 -0
  3. weakincentives/adapters/_names.py +32 -0
  4. weakincentives/adapters/_provider_protocols.py +69 -0
  5. weakincentives/adapters/_tool_messages.py +80 -0
  6. weakincentives/adapters/core.py +102 -0
  7. weakincentives/adapters/litellm.py +254 -0
  8. weakincentives/adapters/openai.py +254 -0
  9. weakincentives/adapters/shared.py +1021 -0
  10. weakincentives/cli/__init__.py +23 -0
  11. weakincentives/cli/wink.py +58 -0
  12. weakincentives/dbc/__init__.py +412 -0
  13. weakincentives/deadlines.py +58 -0
  14. weakincentives/prompt/__init__.py +105 -0
  15. weakincentives/prompt/_generic_params_specializer.py +64 -0
  16. weakincentives/prompt/_normalization.py +48 -0
  17. weakincentives/prompt/_overrides_protocols.py +33 -0
  18. weakincentives/prompt/_types.py +34 -0
  19. weakincentives/prompt/chapter.py +146 -0
  20. weakincentives/prompt/composition.py +281 -0
  21. weakincentives/prompt/errors.py +57 -0
  22. weakincentives/prompt/markdown.py +108 -0
  23. weakincentives/prompt/overrides/__init__.py +59 -0
  24. weakincentives/prompt/overrides/_fs.py +164 -0
  25. weakincentives/prompt/overrides/inspection.py +141 -0
  26. weakincentives/prompt/overrides/local_store.py +275 -0
  27. weakincentives/prompt/overrides/validation.py +534 -0
  28. weakincentives/prompt/overrides/versioning.py +269 -0
  29. weakincentives/prompt/prompt.py +353 -0
  30. weakincentives/prompt/protocols.py +103 -0
  31. weakincentives/prompt/registry.py +375 -0
  32. weakincentives/prompt/rendering.py +288 -0
  33. weakincentives/prompt/response_format.py +60 -0
  34. weakincentives/prompt/section.py +166 -0
  35. weakincentives/prompt/structured_output.py +179 -0
  36. weakincentives/prompt/tool.py +397 -0
  37. weakincentives/prompt/tool_result.py +30 -0
  38. weakincentives/py.typed +0 -0
  39. weakincentives/runtime/__init__.py +82 -0
  40. weakincentives/runtime/events/__init__.py +126 -0
  41. weakincentives/runtime/events/_types.py +110 -0
  42. weakincentives/runtime/logging.py +284 -0
  43. weakincentives/runtime/session/__init__.py +46 -0
  44. weakincentives/runtime/session/_slice_types.py +24 -0
  45. weakincentives/runtime/session/_types.py +55 -0
  46. weakincentives/runtime/session/dataclasses.py +29 -0
  47. weakincentives/runtime/session/protocols.py +34 -0
  48. weakincentives/runtime/session/reducer_context.py +40 -0
  49. weakincentives/runtime/session/reducers.py +82 -0
  50. weakincentives/runtime/session/selectors.py +56 -0
  51. weakincentives/runtime/session/session.py +387 -0
  52. weakincentives/runtime/session/snapshots.py +310 -0
  53. weakincentives/serde/__init__.py +19 -0
  54. weakincentives/serde/_utils.py +240 -0
  55. weakincentives/serde/dataclass_serde.py +55 -0
  56. weakincentives/serde/dump.py +189 -0
  57. weakincentives/serde/parse.py +417 -0
  58. weakincentives/serde/schema.py +260 -0
  59. weakincentives/tools/__init__.py +154 -0
  60. weakincentives/tools/_context.py +38 -0
  61. weakincentives/tools/asteval.py +853 -0
  62. weakincentives/tools/errors.py +26 -0
  63. weakincentives/tools/planning.py +831 -0
  64. weakincentives/tools/podman.py +1655 -0
  65. weakincentives/tools/subagents.py +346 -0
  66. weakincentives/tools/vfs.py +1390 -0
  67. weakincentives/types/__init__.py +35 -0
  68. weakincentives/types/json.py +45 -0
  69. weakincentives-0.9.0.dist-info/METADATA +775 -0
  70. weakincentives-0.9.0.dist-info/RECORD +73 -0
  71. weakincentives-0.9.0.dist-info/WHEEL +4 -0
  72. weakincentives-0.9.0.dist-info/entry_points.txt +2 -0
  73. weakincentives-0.9.0.dist-info/licenses/LICENSE +201 -0
@@ -0,0 +1,310 @@
1
+ # Licensed under the Apache License, Version 2.0 (the "License");
2
+ # you may not use this file except in compliance with the License.
3
+ # You may obtain a copy of the License at
4
+ #
5
+ # http://www.apache.org/licenses/LICENSE-2.0
6
+ #
7
+ # Unless required by applicable law or agreed to in writing, software
8
+ # distributed under the License is distributed on an "AS IS" BASIS,
9
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
10
+ # See the License for the specific language governing permissions and
11
+ # limitations under the License.
12
+
13
+ """Snapshot serialization utilities for :mod:`weakincentives.runtime.session`."""
14
+
15
+ from __future__ import annotations
16
+
17
+ import json
18
+ import types
19
+ from collections.abc import Mapping
20
+ from dataclasses import dataclass, field, is_dataclass
21
+ from datetime import UTC, datetime
22
+ from importlib import import_module
23
+ from typing import Any, TypeGuard, cast, override
24
+
25
+ from ...prompt._types import SupportsDataclass
26
+ from ...serde import dump, parse
27
+ from ...types import JSONValue
28
+ from ._slice_types import SessionSlice, SessionSliceType
29
+ from .dataclasses import is_dataclass_instance
30
+
31
# Version tag embedded in every serialized snapshot envelope.
# Snapshot.from_json rejects payloads carrying any other version string,
# so bump this whenever the envelope shape changes.
SNAPSHOT_SCHEMA_VERSION = "1"


# Immutable view of session state: slice dataclass type -> tuple of instances.
type SnapshotState = Mapping[SessionSliceType, SessionSlice]
35
+
36
+
37
class SnapshotSerializationError(RuntimeError):
    """Raised when snapshot capture fails due to unsupported payloads.

    Chained (``raise ... from error``) onto the underlying ``dump`` failure
    so the original cause is preserved.
    """
39
+
40
+
41
class SnapshotRestoreError(RuntimeError):
    """Raised when snapshot restoration fails due to incompatible payloads.

    Covers malformed JSON, schema-version mismatches, unresolvable type
    identifiers, and items that fail to parse.
    """
43
+
44
+
45
def normalize_snapshot_state(
    state: Mapping[SessionSliceType, SessionSlice],
) -> SnapshotState:
    """Validate snapshot state and return an immutable copy.

    Every key must be a dataclass type and every value a serializable
    dataclass instance; failures raise ``ValueError``.
    """

    checked: dict[SessionSliceType, SessionSlice] = {}
    for key, entries in state.items():
        if not _is_dataclass_type(key):
            raise ValueError("Slice keys must be dataclass types")

        collected: list[SupportsDataclass] = []
        for entry in entries:
            if not is_dataclass_instance(entry):
                raise ValueError(
                    f"Slice {key.__qualname__} contains non-dataclass value"
                )
            # Dry-run serialization so capture fails fast instead of at
            # to_json time.
            try:
                _ = dump(entry)
            except Exception as error:
                raise ValueError(
                    f"Slice {key.__qualname__} cannot be serialized"
                ) from error
            collected.append(entry)

        checked[key] = tuple(collected)

    # Read-only proxy prevents callers from mutating the snapshot state.
    return cast(SnapshotState, types.MappingProxyType(checked))
74
+
75
+
76
+ def _type_identifier(cls: SessionSliceType) -> str:
77
+ return f"{cls.__module__}:{cls.__qualname__}"
78
+
79
+
80
+ def _resolve_type(identifier: str) -> SessionSliceType:
81
+ module_name, _, qualname = identifier.partition(":")
82
+ if not module_name or not qualname:
83
+ msg = f"Invalid type identifier: {identifier!r}"
84
+ raise SnapshotRestoreError(msg)
85
+ module = import_module(module_name)
86
+ target: Any = module
87
+ for part in qualname.split("."):
88
+ target = getattr(target, part, None)
89
+ if target is None:
90
+ msg = f"Type {identifier!r} could not be resolved"
91
+ raise SnapshotRestoreError(msg)
92
+ if not isinstance(target, type):
93
+ msg = f"Resolved object for {identifier!r} is not a type"
94
+ raise SnapshotRestoreError(msg)
95
+ return cast(SessionSliceType, target)
96
+
97
+
98
+ def _ensure_timezone(dt: datetime) -> datetime:
99
+ if dt.tzinfo is None:
100
+ return dt.replace(tzinfo=UTC)
101
+ return dt
102
+
103
+
104
+ def _infer_item_type(
105
+ slice_type: SessionSliceType, values: SessionSlice
106
+ ) -> SessionSliceType:
107
+ if values:
108
+ first_value = values[0]
109
+ first_type = type(first_value)
110
+ for value in values:
111
+ if type(value) is not first_type: # intentional identity check
112
+ msg = (
113
+ "Snapshot slices must contain a single dataclass type; "
114
+ f"found {type(value)!r}"
115
+ )
116
+ raise SnapshotSerializationError(msg)
117
+ return first_type
118
+ return slice_type
119
+
120
+
121
+ def _is_dataclass_type(value: object) -> TypeGuard[type[SupportsDataclass]]:
122
+ return isinstance(value, type) and is_dataclass(value)
123
+
124
+
125
+ @dataclass(slots=True, frozen=True)
126
+ class SnapshotSlicePayload:
127
+ """Typed representation of a serialized snapshot slice entry."""
128
+
129
+ slice_type: str
130
+ item_type: str
131
+ items: tuple[Mapping[str, JSONValue], ...]
132
+
133
+ @classmethod
134
+ def from_object(cls, obj: object) -> SnapshotSlicePayload:
135
+ if not isinstance(obj, Mapping):
136
+ raise SnapshotRestoreError("Slice entry must be an object")
137
+
138
+ entry = cast(Mapping[str, JSONValue], obj)
139
+ slice_identifier = entry.get("slice_type")
140
+ item_identifier = entry.get("item_type")
141
+
142
+ if not isinstance(slice_identifier, str) or not isinstance(
143
+ item_identifier, str
144
+ ):
145
+ raise SnapshotRestoreError("Slice type identifiers must be strings")
146
+
147
+ items_obj_raw = entry.get("items", [])
148
+ if not isinstance(items_obj_raw, list):
149
+ raise SnapshotRestoreError("Slice items must be a list")
150
+
151
+ items_obj = items_obj_raw
152
+ items: list[Mapping[str, JSONValue]] = []
153
+ for item in items_obj:
154
+ if not isinstance(item, Mapping):
155
+ raise SnapshotRestoreError("Slice items must be objects")
156
+ items.append(cast(Mapping[str, JSONValue], item))
157
+
158
+ return cls(
159
+ slice_type=slice_identifier,
160
+ item_type=item_identifier,
161
+ items=tuple(items),
162
+ )
163
+
164
+
165
@dataclass(slots=True, frozen=True)
class SnapshotPayload:
    """Typed representation of the serialized snapshot envelope."""

    # Schema version string carried by the envelope.
    version: str
    # ISO-8601 capture timestamp, kept as text until Snapshot.from_json.
    created_at: str
    # Parsed slice entries in payload order.
    slices: tuple[SnapshotSlicePayload, ...]

    @classmethod
    def from_json(cls, raw: str) -> SnapshotPayload:
        """Decode and shape-check the snapshot envelope from JSON text."""
        try:
            decoded: JSONValue = json.loads(raw)
        except json.JSONDecodeError as error:
            raise SnapshotRestoreError("Invalid snapshot JSON") from error

        if not isinstance(decoded, Mapping):
            raise SnapshotRestoreError("Snapshot payload must be an object")
        envelope = cast("Mapping[str, JSONValue]", decoded)

        version = envelope.get("version")
        if not isinstance(version, str):
            raise SnapshotRestoreError("Snapshot version must be a string")

        created_at = envelope.get("created_at")
        if not isinstance(created_at, str):
            raise SnapshotRestoreError("Snapshot created_at must be a string")

        raw_slices = envelope.get("slices", [])
        if not isinstance(raw_slices, list):
            raise SnapshotRestoreError("Snapshot slices must be a list")

        parsed_slices = tuple(
            SnapshotSlicePayload.from_object(item) for item in raw_slices
        )
        return cls(version=version, created_at=created_at, slices=parsed_slices)
201
+
202
+
203
@dataclass(slots=True, frozen=True)
class Snapshot:
    """Frozen value object representing session slice state."""

    # Capture timestamp; naive values are coerced to UTC in __post_init__.
    created_at: datetime
    # Mapping of slice dataclass type -> tuple of dataclass instances,
    # defaulting to an empty read-only mapping.
    slices: SnapshotState = field(
        default_factory=lambda: cast(
            SnapshotState,
            types.MappingProxyType({}),
        )
    )

    def __post_init__(self) -> None:
        """Normalize fields in place on this frozen instance.

        The class is frozen, so normalization must go through
        ``object.__setattr__``.
        """
        # Re-tuple each slice so callers cannot mutate snapshot contents
        # through a list they still hold.
        normalized: dict[SessionSliceType, SessionSlice] = {
            slice_type: tuple(values) for slice_type, values in self.slices.items()
        }
        object.__setattr__(self, "created_at", _ensure_timezone(self.created_at))
        object.__setattr__(
            self,
            "slices",
            cast(SnapshotState, types.MappingProxyType(normalized)),
        )

    @override
    def __hash__(self) -> int:
        """Hash over a deterministically ordered view of the slices."""
        # Sort by the stable module:qualname identifier so the hash does
        # not depend on mapping insertion order.
        ordered = tuple(
            sorted(
                self.slices.items(),
                key=lambda item: _type_identifier(item[0]),
            )
        )
        return hash((self.created_at, ordered))

    def to_json(self) -> str:
        """Serialize the snapshot to a JSON string.

        Raises:
            SnapshotSerializationError: if a slice mixes dataclass types or
                any value cannot be dumped.
        """

        payload_slices: list[dict[str, JSONValue]] = []
        # Iterate in identifier order for deterministic output.
        for slice_type, values in sorted(
            self.slices.items(), key=lambda item: _type_identifier(item[0])
        ):
            item_type = _infer_item_type(slice_type, values)
            try:
                serialized_items = [
                    cast(Mapping[str, JSONValue], dump(value)) for value in values
                ]
            except Exception as error:
                msg = f"Failed to serialize slice {slice_type.__qualname__}"
                raise SnapshotSerializationError(msg) from error

            payload_slices.append(
                {
                    "slice_type": _type_identifier(slice_type),
                    "item_type": _type_identifier(item_type),
                    "items": serialized_items,
                }
            )

        payload: dict[str, JSONValue] = {
            "version": SNAPSHOT_SCHEMA_VERSION,
            "created_at": self.created_at.isoformat(),
            "slices": payload_slices,
        }
        # sort_keys keeps the JSON text itself byte-deterministic as well.
        return json.dumps(payload, sort_keys=True)

    @classmethod
    def from_json(cls, raw: str) -> Snapshot:
        """Deserialize a snapshot from its JSON representation.

        Raises:
            SnapshotRestoreError: on malformed JSON, a schema-version
                mismatch, unresolvable or non-dataclass types, an invalid
                timestamp, or items that fail to parse.
        """

        payload = SnapshotPayload.from_json(raw)

        if payload.version != SNAPSHOT_SCHEMA_VERSION:
            msg = (
                "Snapshot schema version mismatch: "
                f"expected {SNAPSHOT_SCHEMA_VERSION}, got {payload.version!r}"
            )
            raise SnapshotRestoreError(msg)

        try:
            created_at = datetime.fromisoformat(payload.created_at)
        except ValueError as error:
            raise SnapshotRestoreError("Invalid created_at timestamp") from error

        restored: dict[SessionSliceType, SessionSlice] = {}
        for entry in payload.slices:
            slice_type_candidate = _resolve_type(entry.slice_type)
            item_type_candidate = _resolve_type(entry.item_type)

            if not _is_dataclass_type(slice_type_candidate) or not _is_dataclass_type(
                item_type_candidate
            ):
                raise SnapshotRestoreError("Snapshot types must be dataclasses")

            slice_type = slice_type_candidate
            item_type = item_type_candidate

            restored_items: list[SupportsDataclass] = []
            for item_mapping in entry.items:
                # Items are parsed as the recorded concrete item type, which
                # may differ from the slice key type.
                try:
                    restored_item = parse(item_type, item_mapping)
                except Exception as error:
                    raise SnapshotRestoreError(
                        f"Failed to restore slice {slice_type.__qualname__}"
                    ) from error
                restored_items.append(restored_item)

            restored[slice_type] = tuple(restored_items)

        return cls(created_at=_ensure_timezone(created_at), slices=restored)
@@ -0,0 +1,19 @@
1
+ # Licensed under the Apache License, Version 2.0 (the "License");
2
+ # you may not use this file except in compliance with the License.
3
+ # You may obtain a copy of the License at
4
+ #
5
+ # http://www.apache.org/licenses/LICENSE-2.0
6
+ #
7
+ # Unless required by applicable law or agreed to in writing, software
8
+ # distributed under the License is distributed on an "AS IS" BASIS,
9
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
10
+ # See the License for the specific language governing permissions and
11
+ # limitations under the License.
12
+
13
+ """Stdlib dataclass serde utilities."""
14
+
15
+ from .dump import clone, dump
16
+ from .parse import parse
17
+ from .schema import schema
18
+
19
+ __all__ = ["clone", "dump", "parse", "schema"]
@@ -0,0 +1,240 @@
1
+ # Licensed under the Apache License, Version 2.0 (the "License");
2
+ # you may not use this file except in compliance with the License.
3
+ # You may obtain a copy of the License at
4
+ #
5
+ # http://www.apache.org/licenses/LICENSE-2.0
6
+ #
7
+ # Unless required by applicable law or agreed to in writing, software
8
+ # distributed under the License is distributed on an "AS IS" BASIS,
9
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
10
+ # See the License for the specific language governing permissions and
11
+ # limitations under the License.
12
+
13
+ """Shared helpers for dataclass serde operations."""
14
+
15
+ from __future__ import annotations
16
+
17
+ import re
18
+ from collections.abc import Callable, Iterable, Mapping, Sized
19
+ from dataclasses import dataclass
20
+ from decimal import Decimal
21
+ from re import Pattern
22
+ from typing import Any as _AnyType
23
+ from typing import Final, Literal, cast, get_args
24
+
25
+ from ..types import JSONValue
26
+
27
+ MISSING_SENTINEL: Final[object] = object()
28
+ _UNION_TYPE = type(int | str)
29
+
30
+
31
class _ExtrasDescriptor:
    """Descriptor storing extras for slotted dataclasses.

    NOTE(review): the store is keyed by ``id(instance)`` and entries are
    only removed by an explicit ``__set__(instance, None)``. Entries for
    garbage-collected instances therefore linger, and a recycled ``id()``
    could alias a new instance to stale extras — confirm whether instance
    lifetimes are managed elsewhere before relying on this.
    """

    def __init__(self) -> None:
        super().__init__()
        # Maps id(instance) -> extras dict for that instance.
        self._store: dict[int, dict[str, object]] = {}

    def __get__(
        self, instance: object | None, owner: type[object]
    ) -> dict[str, object] | None:
        # Class-level access yields None rather than the descriptor itself.
        if instance is None:
            return None
        return self._store.get(id(instance))

    def __set__(self, instance: object, value: dict[str, object] | None) -> None:
        # Assigning None clears stored extras; otherwise store a copy so
        # later caller-side mutation does not leak into the store.
        key = id(instance)
        if value is None:
            _ = self._store.pop(key, None)
        else:
            self._store[key] = dict(value)
51
+
52
+
53
+ _SLOTTED_EXTRAS: Final[dict[type[object], _ExtrasDescriptor]] = {}
54
+
55
+
56
+ def _ordered_values(values: Iterable[JSONValue]) -> list[JSONValue]:
57
+ """Return a deterministic list of metadata values."""
58
+
59
+ items = list(values)
60
+ if isinstance(values, (set, frozenset)):
61
+ return sorted(items, key=repr)
62
+ return items
63
+
64
+
65
def _set_extras(instance: object, extras: Mapping[str, object]) -> None:
    """Attach extras to an instance, handling slotted dataclasses."""

    extras_dict = dict(extras)
    try:
        # Fast path: ordinary (and frozen) instances accept a plain
        # attribute via object.__setattr__.
        object.__setattr__(instance, "__extras__", extras_dict)
    except AttributeError:
        # Slotted classes reject unknown attributes; install a shared
        # per-class descriptor (cached in _SLOTTED_EXTRAS) and stash the
        # extras through it instead.
        cls = instance.__class__
        descriptor = _SLOTTED_EXTRAS.get(cls)
        if descriptor is None:
            descriptor = _ExtrasDescriptor()
            _SLOTTED_EXTRAS[cls] = descriptor
            cls.__extras__ = descriptor  # type: ignore[attr-defined]
        descriptor.__set__(instance, extras_dict)
79
+
80
+
81
@dataclass(frozen=True)
class _ParseConfig:
    """Immutable options bundle threaded through parse operations.

    Field semantics are enforced by the parser in ``parse.py`` (not
    visible here); the comments below describe the apparent intent —
    confirm against that module.
    """

    # Policy for input keys with no matching field; presumably "ignore"
    # drops them, "forbid" raises, and "allow" stores them as extras.
    extra: Literal["ignore", "forbid", "allow"]
    # Presumably enables coercion of values toward the annotated type.
    coerce: bool
    # Presumably makes field-name matching case-insensitive.
    case_insensitive: bool
    # Optional callable deriving an input alias from a field name.
    alias_generator: Callable[[str], str] | None
    # Explicit field-name -> alias overrides.
    aliases: Mapping[str, str] | None
88
+
89
+
90
+ def _merge_annotated_meta(
91
+ typ: object, meta: Mapping[str, object] | None
92
+ ) -> tuple[object, dict[str, object]]:
93
+ merged: dict[str, object] = dict(meta or {})
94
+ base = typ
95
+ while getattr(base, "__metadata__", None) is not None:
96
+ args = get_args(base)
97
+ if not args:
98
+ break
99
+ base = args[0]
100
+ for extra in args[1:]:
101
+ if isinstance(extra, Mapping):
102
+ merged.update(cast(Mapping[str, object], extra))
103
+ return base, merged
104
+
105
+
106
def _apply_constraints[ConstrainedT](
    value: ConstrainedT, meta: Mapping[str, object], path: str
) -> ConstrainedT:
    """Apply annotation metadata constraints and transforms to *value*.

    Processing order: string normalization (``strip``, ``lower``/
    ``lowercase``, ``upper``/``uppercase``), numeric bounds (``ge``/
    ``minimum``, ``gt``/``exclusiveMinimum``, ``le``/``maximum``, ``lt``/
    ``exclusiveMaximum``), length bounds (``min_length``/``minLength``,
    ``max_length``/``maxLength``), ``regex``/``pattern`` matching,
    membership (``in``/``enum``, ``not_in``), then ``validators``/
    ``validate`` callables and finally one ``convert``/``transform``
    callable. Failures raise ``ValueError`` (or re-raise the callable's
    ``TypeError``/``ValueError``) prefixed with *path*.
    """
    if not meta:
        return value

    # Normalize strings first so every later check sees the canonical form.
    result: object = value
    if isinstance(result, str):
        if meta.get("strip"):
            result = result.strip()
        if meta.get("lower") or meta.get("lowercase"):
            result = result.lower()
        if meta.get("upper") or meta.get("uppercase"):
            result = result.upper()

    def _normalize_option(option: JSONValue) -> JSONValue:
        # Mirror the string normalization on membership options so the
        # comparison with the (already-normalized) result is consistent.
        # Reads the enclosing `result` at call time, after normalization.
        if isinstance(result, str) and isinstance(option, str):
            candidate: str = option
            if meta.get("strip"):
                candidate = candidate.strip()
            if meta.get("lower") or meta.get("lowercase"):
                candidate = candidate.lower()
            if meta.get("upper") or meta.get("uppercase"):
                candidate = candidate.upper()
            return candidate
        return option

    def _fail(message: str) -> None:
        # All constraint failures share the "<path>: <message>" shape.
        raise ValueError(f"{path}: {message}")

    # Numeric bound checks; note bool is a subclass of int and is checked too.
    numeric_value = result
    if isinstance(numeric_value, (int, float, Decimal)):
        numeric = numeric_value
        minimum_candidate = meta.get("ge", meta.get("minimum"))
        if (
            isinstance(minimum_candidate, (int, float, Decimal))
            and numeric < minimum_candidate
        ):
            _fail(f"must be >= {minimum_candidate}")
        exclusive_min_candidate = meta.get("gt", meta.get("exclusiveMinimum"))
        if (
            isinstance(exclusive_min_candidate, (int, float, Decimal))
            and numeric <= exclusive_min_candidate
        ):
            _fail(f"must be > {exclusive_min_candidate}")
        maximum_candidate = meta.get("le", meta.get("maximum"))
        if (
            isinstance(maximum_candidate, (int, float, Decimal))
            and numeric > maximum_candidate
        ):
            _fail(f"must be <= {maximum_candidate}")
        exclusive_max_candidate = meta.get("lt", meta.get("exclusiveMaximum"))
        if (
            isinstance(exclusive_max_candidate, (int, float, Decimal))
            and numeric >= exclusive_max_candidate
        ):
            _fail(f"must be < {exclusive_max_candidate}")

    # Length bounds apply to anything with __len__ (str, list, dict, ...).
    if isinstance(result, Sized):
        min_length_candidate = meta.get("min_length", meta.get("minLength"))
        if isinstance(min_length_candidate, int) and len(result) < min_length_candidate:
            _fail(f"length must be >= {min_length_candidate}")
        max_length_candidate = meta.get("max_length", meta.get("maxLength"))
        if isinstance(max_length_candidate, int) and len(result) > max_length_candidate:
            _fail(f"length must be <= {max_length_candidate}")

    # Pattern may be a raw string or a precompiled re.Pattern; both use
    # search (match anywhere), not fullmatch.
    pattern = meta.get("regex", meta.get("pattern"))
    if isinstance(pattern, str) and isinstance(result, str):
        if not re.search(pattern, result):
            _fail(f"does not match pattern {pattern}")
    elif isinstance(pattern, Pattern) and isinstance(result, str):
        compiled_pattern = cast(Pattern[str], pattern)
        if not compiled_pattern.search(result):
            _fail(f"does not match pattern {pattern}")

    # Membership: "in"/"enum" whitelist, normalized like the result.
    members = meta.get("in") or meta.get("enum")
    if isinstance(members, Iterable) and not isinstance(members, (str, bytes)):
        options_iter = cast(Iterable[JSONValue], members)
        options = _ordered_values(options_iter)
        normalized_options = [_normalize_option(option) for option in options]
        if result not in normalized_options:
            _fail(f"must be one of {normalized_options}")

    # Membership: "not_in" blacklist.
    not_members = meta.get("not_in")
    if isinstance(not_members, Iterable) and not isinstance(not_members, (str, bytes)):
        forbidden_iter = cast(Iterable[JSONValue], not_members)
        forbidden = _ordered_values(forbidden_iter)
        normalized_forbidden = [_normalize_option(option) for option in forbidden]
        if result in normalized_forbidden:
            _fail(f"may not be one of {normalized_forbidden}")

    # Validators may be a single callable or an iterable of callables; each
    # receives the current result and returns the (possibly replaced) value.
    validators = meta.get("validators", meta.get("validate"))
    if validators:
        callables: Iterable[Callable[[ConstrainedT], ConstrainedT]]
        if isinstance(validators, Iterable) and not isinstance(
            validators, (str, bytes)
        ):
            callables = cast(
                Iterable[Callable[[ConstrainedT], ConstrainedT]], validators
            )
        else:
            callables = (cast(Callable[[ConstrainedT], ConstrainedT], validators),)
        for validator in callables:
            try:
                result = validator(cast(ConstrainedT, result))
            except (TypeError, ValueError) as error:
                # Preserve the error type but prefix the field path.
                raise type(error)(f"{path}: {error}") from error
            except Exception as error:  # pragma: no cover - defensive
                raise ValueError(f"{path}: validator raised {error!r}") from error

    # Final single-callable conversion step.
    converter = meta.get("convert", meta.get("transform"))
    if converter:
        converter_fn = cast(Callable[[ConstrainedT], ConstrainedT], converter)
        try:
            result = converter_fn(cast(ConstrainedT, result))
        except (TypeError, ValueError) as error:
            raise type(error)(f"{path}: {error}") from error
        except Exception as error:  # pragma: no cover - defensive
            raise ValueError(f"{path}: converter raised {error!r}") from error

    return cast(ConstrainedT, result)
227
+
228
+
229
+ __all__ = [
230
+ "MISSING_SENTINEL",
231
+ "_SLOTTED_EXTRAS",
232
+ "_UNION_TYPE",
233
+ "_AnyType",
234
+ "_ExtrasDescriptor",
235
+ "_ParseConfig",
236
+ "_apply_constraints",
237
+ "_merge_annotated_meta",
238
+ "_ordered_values",
239
+ "_set_extras",
240
+ ]
@@ -0,0 +1,55 @@
1
+ # Licensed under the Apache License, Version 2.0 (the "License");
2
+ # you may not use this file except in compliance with the License.
3
+ # You may obtain a copy of the License at
4
+ #
5
+ # http://www.apache.org/licenses/LICENSE-2.0
6
+ #
7
+ # Unless required by applicable law or agreed to in writing, software
8
+ # distributed under the License is distributed on an "AS IS" BASIS,
9
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
10
+ # See the License for the specific language governing permissions and
11
+ # limitations under the License.
12
+
13
+ """Backward compatible exports for dataclass serde helpers."""
14
+
15
+ # pyright: reportPrivateUsage=false
16
+
17
+ from __future__ import annotations
18
+
19
+ from typing import get_args
20
+
21
+ from ._utils import (
22
+ _SLOTTED_EXTRAS,
23
+ _UNION_TYPE,
24
+ MISSING_SENTINEL,
25
+ _AnyType,
26
+ _apply_constraints,
27
+ _ExtrasDescriptor,
28
+ _merge_annotated_meta,
29
+ _ordered_values,
30
+ _ParseConfig,
31
+ _set_extras,
32
+ )
33
+ from .dump import clone, dump
34
+ from .parse import _bool_from_str, _coerce_to_type, parse
35
+ from .schema import schema
36
+
37
+ __all__ = [
38
+ "MISSING_SENTINEL",
39
+ "_SLOTTED_EXTRAS",
40
+ "_UNION_TYPE",
41
+ "_AnyType",
42
+ "_ExtrasDescriptor",
43
+ "_ParseConfig",
44
+ "_apply_constraints",
45
+ "_bool_from_str",
46
+ "_coerce_to_type",
47
+ "_merge_annotated_meta",
48
+ "_ordered_values",
49
+ "_set_extras",
50
+ "clone",
51
+ "dump",
52
+ "get_args",
53
+ "parse",
54
+ "schema",
55
+ ]