weakincentives-0.9.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (73)
  1. weakincentives/__init__.py +67 -0
  2. weakincentives/adapters/__init__.py +37 -0
  3. weakincentives/adapters/_names.py +32 -0
  4. weakincentives/adapters/_provider_protocols.py +69 -0
  5. weakincentives/adapters/_tool_messages.py +80 -0
  6. weakincentives/adapters/core.py +102 -0
  7. weakincentives/adapters/litellm.py +254 -0
  8. weakincentives/adapters/openai.py +254 -0
  9. weakincentives/adapters/shared.py +1021 -0
  10. weakincentives/cli/__init__.py +23 -0
  11. weakincentives/cli/wink.py +58 -0
  12. weakincentives/dbc/__init__.py +412 -0
  13. weakincentives/deadlines.py +58 -0
  14. weakincentives/prompt/__init__.py +105 -0
  15. weakincentives/prompt/_generic_params_specializer.py +64 -0
  16. weakincentives/prompt/_normalization.py +48 -0
  17. weakincentives/prompt/_overrides_protocols.py +33 -0
  18. weakincentives/prompt/_types.py +34 -0
  19. weakincentives/prompt/chapter.py +146 -0
  20. weakincentives/prompt/composition.py +281 -0
  21. weakincentives/prompt/errors.py +57 -0
  22. weakincentives/prompt/markdown.py +108 -0
  23. weakincentives/prompt/overrides/__init__.py +59 -0
  24. weakincentives/prompt/overrides/_fs.py +164 -0
  25. weakincentives/prompt/overrides/inspection.py +141 -0
  26. weakincentives/prompt/overrides/local_store.py +275 -0
  27. weakincentives/prompt/overrides/validation.py +534 -0
  28. weakincentives/prompt/overrides/versioning.py +269 -0
  29. weakincentives/prompt/prompt.py +353 -0
  30. weakincentives/prompt/protocols.py +103 -0
  31. weakincentives/prompt/registry.py +375 -0
  32. weakincentives/prompt/rendering.py +288 -0
  33. weakincentives/prompt/response_format.py +60 -0
  34. weakincentives/prompt/section.py +166 -0
  35. weakincentives/prompt/structured_output.py +179 -0
  36. weakincentives/prompt/tool.py +397 -0
  37. weakincentives/prompt/tool_result.py +30 -0
  38. weakincentives/py.typed +0 -0
  39. weakincentives/runtime/__init__.py +82 -0
  40. weakincentives/runtime/events/__init__.py +126 -0
  41. weakincentives/runtime/events/_types.py +110 -0
  42. weakincentives/runtime/logging.py +284 -0
  43. weakincentives/runtime/session/__init__.py +46 -0
  44. weakincentives/runtime/session/_slice_types.py +24 -0
  45. weakincentives/runtime/session/_types.py +55 -0
  46. weakincentives/runtime/session/dataclasses.py +29 -0
  47. weakincentives/runtime/session/protocols.py +34 -0
  48. weakincentives/runtime/session/reducer_context.py +40 -0
  49. weakincentives/runtime/session/reducers.py +82 -0
  50. weakincentives/runtime/session/selectors.py +56 -0
  51. weakincentives/runtime/session/session.py +387 -0
  52. weakincentives/runtime/session/snapshots.py +310 -0
  53. weakincentives/serde/__init__.py +19 -0
  54. weakincentives/serde/_utils.py +240 -0
  55. weakincentives/serde/dataclass_serde.py +55 -0
  56. weakincentives/serde/dump.py +189 -0
  57. weakincentives/serde/parse.py +417 -0
  58. weakincentives/serde/schema.py +260 -0
  59. weakincentives/tools/__init__.py +154 -0
  60. weakincentives/tools/_context.py +38 -0
  61. weakincentives/tools/asteval.py +853 -0
  62. weakincentives/tools/errors.py +26 -0
  63. weakincentives/tools/planning.py +831 -0
  64. weakincentives/tools/podman.py +1655 -0
  65. weakincentives/tools/subagents.py +346 -0
  66. weakincentives/tools/vfs.py +1390 -0
  67. weakincentives/types/__init__.py +35 -0
  68. weakincentives/types/json.py +45 -0
  69. weakincentives-0.9.0.dist-info/METADATA +775 -0
  70. weakincentives-0.9.0.dist-info/RECORD +73 -0
  71. weakincentives-0.9.0.dist-info/WHEEL +4 -0
  72. weakincentives-0.9.0.dist-info/entry_points.txt +2 -0
  73. weakincentives-0.9.0.dist-info/licenses/LICENSE +201 -0
weakincentives/serde/dump.py
@@ -0,0 +1,189 @@
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+
+ """Dataclass serialization helpers."""
+
+ # pyright: reportUnknownArgumentType=false, reportUnknownVariableType=false, reportUnknownMemberType=false, reportUnknownParameterType=false, reportCallIssue=false, reportArgumentType=false, reportPrivateUsage=false
+
+ from __future__ import annotations
+
+ import dataclasses
+ from collections.abc import Callable, Mapping, Sequence
+ from datetime import date, datetime, time
+ from decimal import Decimal
+ from enum import Enum
+ from pathlib import Path
+ from typing import cast
+ from uuid import UUID
+
+ from ..types import JSONValue
+ from ._utils import MISSING_SENTINEL, _set_extras
+
+
+ def _serialize(
+     value: object,
+     *,
+     by_alias: bool,
+     exclude_none: bool,
+     alias_generator: Callable[[str], str] | None,
+ ) -> JSONValue | object:
+     if value is None:
+         return MISSING_SENTINEL if exclude_none else None
+     if dataclasses.is_dataclass(value):
+         return dump(
+             value,
+             by_alias=by_alias,
+             exclude_none=exclude_none,
+             computed=False,
+             alias_generator=alias_generator,
+         )
+     if isinstance(value, Enum):
+         return value.value
+     if isinstance(value, (datetime, date, time)):
+         return value.isoformat()
+     if isinstance(value, (UUID, Decimal, Path)):
+         return str(value)
+     if isinstance(value, Mapping):
+         serialized: dict[object, JSONValue] = {}
+         for key, item in value.items():
+             item_value = _serialize(
+                 item,
+                 by_alias=by_alias,
+                 exclude_none=exclude_none,
+                 alias_generator=alias_generator,
+             )
+             if item_value is MISSING_SENTINEL:
+                 continue
+             serialized[key] = cast(JSONValue, item_value)
+         return serialized
+     if isinstance(value, set):
+         items: list[JSONValue] = []
+         for member in value:
+             item_value = _serialize(
+                 member,
+                 by_alias=by_alias,
+                 exclude_none=exclude_none,
+                 alias_generator=alias_generator,
+             )
+             if item_value is MISSING_SENTINEL:
+                 continue
+             items.append(cast(JSONValue, item_value))
+         try:
+             return sorted(items, key=repr)
+         except TypeError:
+             return items
+     if isinstance(value, Sequence) and not isinstance(value, (str, bytes, bytearray)):
+         items: list[JSONValue] = []
+         for item in value:
+             item_value = _serialize(
+                 item,
+                 by_alias=by_alias,
+                 exclude_none=exclude_none,
+                 alias_generator=alias_generator,
+             )
+             if item_value is MISSING_SENTINEL:
+                 continue
+             items.append(cast(JSONValue, item_value))
+         return items
+     return value
+
+
+ def dump(
+     obj: object,
+     *,
+     by_alias: bool = True,
+     exclude_none: bool = False,
+     computed: bool = False,
+     alias_generator: Callable[[str], str] | None = None,
+ ) -> dict[str, JSONValue]:
+     """Serialize a dataclass instance to a JSON-compatible dictionary."""
+
+     if not dataclasses.is_dataclass(obj) or isinstance(obj, type):
+         raise TypeError("dump() requires a dataclass instance")
+
+     result: dict[str, JSONValue] = {}
+     for field in dataclasses.fields(obj):
+         field_meta = dict(field.metadata)
+         key = field.name
+         if by_alias:
+             alias = field_meta.get("alias")
+             if alias is None and alias_generator is not None:
+                 alias = alias_generator(field.name)
+             if alias:
+                 key = alias
+         value = getattr(obj, field.name)
+         serialized = _serialize(
+             value,
+             by_alias=by_alias,
+             exclude_none=exclude_none,
+             alias_generator=alias_generator,
+         )
+         if serialized is MISSING_SENTINEL:
+             continue
+         result[key] = cast(JSONValue, serialized)
+
+     if computed and hasattr(obj.__class__, "__computed__"):
+         computed_fields = cast(
+             Sequence[str], getattr(obj.__class__, "__computed__", ())
+         )
+         for name in computed_fields:
+             value = getattr(obj, name)
+             serialized = _serialize(
+                 value,
+                 by_alias=by_alias,
+                 exclude_none=exclude_none,
+                 alias_generator=alias_generator,
+             )
+             if serialized is MISSING_SENTINEL:
+                 continue
+             key = name
+             if by_alias and alias_generator is not None:
+                 key = alias_generator(name)
+             result[key] = cast(JSONValue, serialized)
+
+     return result
+
+
+ def clone[T](obj: T, **updates: object) -> T:
+     """Clone a dataclass instance and re-run model-level validation hooks."""
+
+     if not dataclasses.is_dataclass(obj) or isinstance(obj, type):
+         raise TypeError("clone() requires a dataclass instance")
+     field_names = {field.name for field in dataclasses.fields(obj)}
+     extras: dict[str, object] = {}
+     extras_attr = getattr(obj, "__extras__", None)
+     if hasattr(obj, "__dict__"):
+         extras = {
+             key: value for key, value in obj.__dict__.items() if key not in field_names
+         }
+     elif isinstance(extras_attr, Mapping):
+         extras = dict(extras_attr)
+
+     cloned = dataclasses.replace(obj, **updates)
+
+     if extras:
+         if hasattr(cloned, "__dict__"):
+             for key, value in extras.items():
+                 object.__setattr__(cloned, key, value)
+         else:
+             _set_extras(cloned, extras)
+
+     validator = getattr(cloned, "__validate__", None)
+     if callable(validator):
+         _ = validator()
+     post_validator = getattr(cloned, "__post_validate__", None)
+     if callable(post_validator):
+         _ = post_validator()
+     return cloned
+
+
+ __all__ = ["clone", "dump"]
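For reference, a minimal usage sketch of the dump() and clone() helpers added above; the Point dataclass and its fields are illustrative only, not part of the package:

    from dataclasses import dataclass, field
    from weakincentives.serde.dump import clone, dump

    @dataclass(frozen=True)
    class Point:
        x: int
        y: int
        # "alias" field metadata is honored by dump() when by_alias=True (the default)
        label: str = field(default="origin", metadata={"alias": "name"})

    p = Point(x=1, y=2)
    dump(p)        # {"x": 1, "y": 2, "name": "origin"}
    clone(p, x=3)  # Point(x=3, y=2, label="origin"); re-runs __validate__ hooks if defined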
weakincentives/serde/parse.py
@@ -0,0 +1,417 @@
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+
+ """Dataclass parsing helpers."""
+
+ # pyright: reportUnknownArgumentType=false, reportUnknownVariableType=false, reportUnknownMemberType=false, reportUnknownParameterType=false, reportUnnecessaryIsInstance=false, reportCallIssue=false, reportArgumentType=false, reportPossiblyUnboundVariable=false, reportPrivateUsage=false
+
+ from __future__ import annotations
+
+ import dataclasses
+ from collections.abc import Callable, Iterable, Mapping, Sequence
+ from dataclasses import MISSING
+ from datetime import date, datetime, time
+ from decimal import Decimal
+ from enum import Enum
+ from pathlib import Path
+ from typing import Literal, cast, get_origin, get_type_hints
+ from typing import get_args as typing_get_args
+ from uuid import UUID
+
+ from ..types import JSONValue
+ from ._utils import (
+     _UNION_TYPE,
+     _AnyType,
+     _apply_constraints,
+     _merge_annotated_meta,
+     _ParseConfig,
+     _set_extras,
+ )
+
+ get_args = typing_get_args
+
+
+ def _bool_from_str(value: str) -> bool:
+     lowered = value.strip().lower()
+     truthy = {"true", "1", "yes", "on"}
+     falsy = {"false", "0", "no", "off"}
+     if lowered in truthy:
+         return True
+     if lowered in falsy:
+         return False
+     raise TypeError(f"Cannot interpret '{value}' as boolean")
+
+
+ def _coerce_to_type(
+     value: object,
+     typ: object,
+     meta: Mapping[str, object] | None,
+     path: str,
+     config: _ParseConfig,
+ ) -> object:
+     base_type, merged_meta = _merge_annotated_meta(typ, meta)
+     origin = get_origin(base_type)
+     type_name = getattr(base_type, "__name__", type(base_type).__name__)
+
+     if base_type is object or base_type is _AnyType:
+         return _apply_constraints(value, merged_meta, path)
+
+     if origin is _UNION_TYPE:
+         if (
+             config.coerce
+             and isinstance(value, str)
+             and value.strip() == ""
+             and any(arg is type(None) for arg in get_args(base_type))
+         ):
+             return _apply_constraints(None, merged_meta, path)
+         last_error: Exception | None = None
+         for arg in get_args(base_type):
+             if arg is type(None):
+                 if value is None:
+                     return _apply_constraints(None, merged_meta, path)
+                 continue
+             try:
+                 coerced = _coerce_to_type(value, arg, None, path, config)
+             except (TypeError, ValueError) as error:
+                 last_error = error
+                 continue
+             return _apply_constraints(coerced, merged_meta, path)
+         if last_error is not None:
+             message = str(last_error)
+             if message.startswith(f"{path}:") or message.startswith(f"{path}."):
+                 raise last_error
+             if isinstance(last_error, TypeError):
+                 raise TypeError(f"{path}: {message}") from last_error
+             raise ValueError(f"{path}: {message}") from last_error
+         raise TypeError(f"{path}: no matching type in Union")
+
+     if base_type is type(None):
+         if value is not None:
+             raise TypeError(f"{path}: expected None")
+         return None
+
+     if value is None:
+         raise TypeError(f"{path}: value cannot be None")
+
+     if origin is Literal:
+         literals = get_args(base_type)
+         last_literal_error: Exception | None = None
+         for literal in literals:
+             if value == literal:
+                 return _apply_constraints(literal, merged_meta, path)
+             if config.coerce:
+                 literal_type = cast(type[object], type(literal))
+                 try:
+                     if isinstance(literal, bool) and isinstance(value, str):
+                         coerced_literal = _bool_from_str(value)
+                     else:
+                         coerced_literal = literal_type(value)
+                 except (TypeError, ValueError) as error:
+                     last_literal_error = error
+                     continue
+                 if coerced_literal == literal:
+                     return _apply_constraints(literal, merged_meta, path)
+         if last_literal_error is not None:
+             raise type(last_literal_error)(
+                 f"{path}: {last_literal_error}"
+             ) from last_literal_error
+         raise ValueError(f"{path}: expected one of {list(literals)}")
+
+     if dataclasses.is_dataclass(base_type):
+         dataclass_type = base_type if isinstance(base_type, type) else type(base_type)
+         if isinstance(value, dataclass_type):
+             return _apply_constraints(value, merged_meta, path)
+         if not isinstance(value, Mapping):
+             type_name = getattr(
+                 dataclass_type, "__name__", type(dataclass_type).__name__
+             )
+             raise TypeError(f"{path}: expected mapping for dataclass {type_name}")
+         try:
+             parsed = parse(
+                 cast(type[object], dataclass_type),
+                 cast(Mapping[str, object], value),
+                 extra=config.extra,
+                 coerce=config.coerce,
+                 case_insensitive=config.case_insensitive,
+                 alias_generator=config.alias_generator,
+                 aliases=config.aliases,
+             )
+         except (TypeError, ValueError) as error:
+             message = str(error)
+             if ":" in message:
+                 prefix, suffix = message.split(":", 1)
+                 if " " not in prefix:
+                     message = f"{path}.{prefix}:{suffix}"
+                 else:
+                     message = f"{path}: {message}"
+             else:
+                 message = f"{path}: {message}"
+             raise type(error)(message) from error
+         return _apply_constraints(parsed, merged_meta, path)
+
+     if origin in {list, Sequence, tuple, set}:
+         is_sequence_like = isinstance(value, Sequence) and not isinstance(
+             value, (str, bytes, bytearray)
+         )
+         if origin in {list, Sequence} and not is_sequence_like:
+             if config.coerce and isinstance(value, str):
+                 value = [value]
+             else:
+                 raise TypeError(f"{path}: expected sequence")
+         if origin is set and not isinstance(value, (set, list, tuple)):
+             if config.coerce:
+                 if isinstance(value, str):
+                     value = [value]
+                 elif isinstance(value, Iterable):
+                     value = list(cast(Iterable[JSONValue], value))
+                 else:
+                     raise TypeError(f"{path}: expected set")
+             else:
+                 raise TypeError(f"{path}: expected set")
+         if origin is tuple and not is_sequence_like:
+             if config.coerce and isinstance(value, str):
+                 value = [value]
+             else:
+                 raise TypeError(f"{path}: expected tuple")
+
+         if isinstance(value, str):  # pragma: no cover - handled by earlier coercion
+             items = [value]
+         elif isinstance(value, Iterable):
+             items = list(cast(Iterable[JSONValue], value))
+         else:  # pragma: no cover - defensive guard
+             raise TypeError(f"{path}: expected iterable")
+         args = get_args(base_type)
+         coerced_items: list[object] = []
+         if (
+             origin is tuple
+             and args
+             and args[-1] is not Ellipsis
+             and len(args) != len(items)
+         ):
+             raise ValueError(f"{path}: expected {len(args)} items")
+         for index, item in enumerate(items):
+             item_path = f"{path}[{index}]"
+             if origin is tuple and args:
+                 item_type = args[0] if args[-1] is Ellipsis else args[index]
+             else:
+                 item_type = args[0] if args else object
+             coerced_items.append(
+                 _coerce_to_type(item, item_type, None, item_path, config)
+             )
+         if origin is set:
+             value_out: object = set(coerced_items)
+         elif origin is tuple:
+             value_out = tuple(coerced_items)
+         else:
+             value_out = list(coerced_items)
+         return _apply_constraints(value_out, merged_meta, path)
+
+     if origin is dict or origin is Mapping:
+         if not isinstance(value, Mapping):
+             raise TypeError(f"{path}: expected mapping")
+         key_type, value_type = (
+             get_args(base_type) if get_args(base_type) else (object, object)
+         )
+         mapping_value = cast(Mapping[JSONValue, JSONValue], value)
+         result_dict: dict[object, object] = {}
+         for key, item in mapping_value.items():
+             coerced_key = _coerce_to_type(key, key_type, None, f"{path} keys", config)
+             coerced_value = _coerce_to_type(
+                 item, value_type, None, f"{path}[{coerced_key}]", config
+             )
+             result_dict[coerced_key] = coerced_value
+         return _apply_constraints(result_dict, merged_meta, path)
+
+     if isinstance(base_type, type) and issubclass(base_type, Enum):
+         if isinstance(value, base_type):
+             enum_value = value
+         elif config.coerce:
+             try:
+                 enum_value = base_type[value]
+             except KeyError:
+                 try:
+                     enum_value = base_type(value)
+                 except ValueError as error:
+                     raise ValueError(f"{path}: invalid enum value {value!r}") from error
+             except TypeError:
+                 try:
+                     enum_value = base_type(value)
+                 except ValueError as error:
+                     raise ValueError(f"{path}: invalid enum value {value!r}") from error
+         else:
+             raise TypeError(f"{path}: expected {type_name}")
+         return _apply_constraints(enum_value, merged_meta, path)
+
+     if base_type is bool:
+         if isinstance(value, bool):
+             return _apply_constraints(value, merged_meta, path)
+         if config.coerce and isinstance(value, str):
+             try:
+                 coerced_bool = _bool_from_str(value)
+             except TypeError as error:
+                 raise TypeError(f"{path}: {error}") from error
+             return _apply_constraints(coerced_bool, merged_meta, path)
+         if config.coerce and isinstance(value, (int, float)):
+             return _apply_constraints(bool(value), merged_meta, path)
+         raise TypeError(f"{path}: expected bool")
+
+     if base_type in {int, float, str, Decimal, UUID, Path, datetime, date, time}:
+         literal_type = cast(type[object], base_type)
+         if isinstance(value, literal_type):
+             return _apply_constraints(value, merged_meta, path)
+         if not config.coerce:
+             raise TypeError(f"{path}: expected {type_name}")
+         coerced_value: object | None = None
+         try:
+             if literal_type is int:
+                 coerced_value = int(value)
+             elif literal_type is float:
+                 coerced_value = float(value)
+             elif literal_type is str:
+                 coerced_value = str(value)
+             elif literal_type is Decimal:
+                 coerced_value = Decimal(str(value))
+             elif literal_type is UUID:
+                 coerced_value = UUID(str(value))
+             elif literal_type is Path:
+                 coerced_value = Path(str(value))
+             elif literal_type is datetime:
+                 coerced_value = datetime.fromisoformat(str(value))
+             elif literal_type is date:
+                 coerced_value = date.fromisoformat(str(value))
+             elif literal_type is time:
+                 coerced_value = time.fromisoformat(str(value))
+         except Exception as error:
+             raise TypeError(
+                 f"{path}: unable to coerce {value!r} to {type_name}"
+             ) from error
+         if (
+             coerced_value is None
+         ):  # pragma: no cover - impossible when branches exhaust types
+             raise AssertionError("Unhandled literal type coercion")
+         return _apply_constraints(coerced_value, merged_meta, path)
+
+     try:
+         coerced = base_type(value)
+     except Exception as error:
+         raise type(error)(str(error)) from error
+     return _apply_constraints(coerced, merged_meta, path)
+
+
+ def _find_key(
+     data: Mapping[str, object], name: str, alias: str | None, case_insensitive: bool
+ ) -> str | None:
+     candidates = [alias, name]
+     for candidate in candidates:
+         if candidate is None:
+             continue
+         if candidate in data:
+             return candidate
+     if not case_insensitive:
+         return None
+     lowered_map: dict[str, str] = {}
+     for key in data:
+         if isinstance(key, str):
+             _ = lowered_map.setdefault(key.lower(), key)
+     for candidate in candidates:
+         if candidate is None or not isinstance(candidate, str):
+             continue
+         lowered = candidate.lower()
+         if lowered in lowered_map:
+             return lowered_map[lowered]
+     return None
+
+
+ def parse[T](
+     cls: type[T],
+     data: Mapping[str, object] | object,
+     *,
+     extra: Literal["ignore", "forbid", "allow"] = "ignore",
+     coerce: bool = True,
+     case_insensitive: bool = False,
+     alias_generator: Callable[[str], str] | None = None,
+     aliases: Mapping[str, str] | None = None,
+ ) -> T:
+     """Parse a mapping into a dataclass instance."""
+
+     if not dataclasses.is_dataclass(cls) or not isinstance(cls, type):
+         raise TypeError("parse() requires a dataclass type")
+     if not isinstance(data, Mapping):
+         raise TypeError("parse() requires a mapping input")
+     if extra not in {"ignore", "forbid", "allow"}:
+         raise ValueError("extra must be one of 'ignore', 'forbid', or 'allow'")
+
+     config = _ParseConfig(
+         extra=extra,
+         coerce=coerce,
+         case_insensitive=case_insensitive,
+         alias_generator=alias_generator,
+         aliases=aliases,
+     )
+
+     mapping_data = cast(Mapping[str, object], data)
+     type_hints = get_type_hints(cls, include_extras=True)
+     kwargs: dict[str, object] = {}
+     used_keys: set[str] = set()
+
+     for field in dataclasses.fields(cls):
+         if not field.init:
+             continue
+         field_meta = dict(field.metadata)
+         field_alias = None
+         if aliases and field.name in aliases:
+             field_alias = aliases[field.name]
+         elif (alias := field_meta.get("alias")) is not None:
+             field_alias = alias
+         elif alias_generator is not None:
+             field_alias = alias_generator(field.name)
+
+         key = _find_key(mapping_data, field.name, field_alias, case_insensitive)
+         if key is None:
+             if field.default is MISSING and field.default_factory is MISSING:
+                 raise ValueError(f"Missing required field: '{field.name}'")
+             continue
+         used_keys.add(key)
+         raw_value = mapping_data[key]
+         field_type = type_hints.get(field.name, field.type)
+         try:
+             value = _coerce_to_type(
+                 raw_value, field_type, field_meta, field.name, config
+             )
+         except (TypeError, ValueError) as error:
+             raise type(error)(str(error)) from error
+         kwargs[field.name] = value
+
+     instance = cls(**kwargs)
+
+     extras = {key: mapping_data[key] for key in mapping_data if key not in used_keys}
+     if extras:
+         if extra == "forbid":
+             raise ValueError(f"Extra keys not permitted: {list(extras.keys())}")
+         if extra == "allow":
+             if hasattr(instance, "__dict__"):
+                 for key, value in extras.items():
+                     object.__setattr__(instance, key, value)
+             else:
+                 _set_extras(instance, extras)
+
+     validator = getattr(instance, "__validate__", None)
+     if callable(validator):
+         _ = validator()
+     post_validator = getattr(instance, "__post_validate__", None)
+     if callable(post_validator):
+         _ = post_validator()
+
+     return instance
+
+
+ __all__ = ["parse"]
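For reference, a minimal usage sketch of parse() as added above; the Server dataclass and the input mapping are illustrative only, not part of the package:

    from dataclasses import dataclass
    from weakincentives.serde.parse import parse

    @dataclass
    class Server:
        host: str
        port: int
        debug: bool = False

    # With coerce=True (the default) string values are converted to the target
    # field types, and extra="ignore" (the default) drops unknown keys.
    server = parse(
        Server,
        {"HOST": "localhost", "Port": "8080", "debug": "yes", "region": "eu"},
        case_insensitive=True,
    )
    # -> Server(host='localhost', port=8080, debug=True)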