pyopenapi-gen 2.7.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pyopenapi_gen/__init__.py +224 -0
- pyopenapi_gen/__main__.py +6 -0
- pyopenapi_gen/cli.py +62 -0
- pyopenapi_gen/context/CLAUDE.md +284 -0
- pyopenapi_gen/context/file_manager.py +52 -0
- pyopenapi_gen/context/import_collector.py +382 -0
- pyopenapi_gen/context/render_context.py +726 -0
- pyopenapi_gen/core/CLAUDE.md +224 -0
- pyopenapi_gen/core/__init__.py +0 -0
- pyopenapi_gen/core/auth/base.py +22 -0
- pyopenapi_gen/core/auth/plugins.py +89 -0
- pyopenapi_gen/core/cattrs_converter.py +810 -0
- pyopenapi_gen/core/exceptions.py +20 -0
- pyopenapi_gen/core/http_status_codes.py +218 -0
- pyopenapi_gen/core/http_transport.py +222 -0
- pyopenapi_gen/core/loader/__init__.py +12 -0
- pyopenapi_gen/core/loader/loader.py +174 -0
- pyopenapi_gen/core/loader/operations/__init__.py +12 -0
- pyopenapi_gen/core/loader/operations/parser.py +161 -0
- pyopenapi_gen/core/loader/operations/post_processor.py +62 -0
- pyopenapi_gen/core/loader/operations/request_body.py +90 -0
- pyopenapi_gen/core/loader/parameters/__init__.py +10 -0
- pyopenapi_gen/core/loader/parameters/parser.py +186 -0
- pyopenapi_gen/core/loader/responses/__init__.py +10 -0
- pyopenapi_gen/core/loader/responses/parser.py +111 -0
- pyopenapi_gen/core/loader/schemas/__init__.py +11 -0
- pyopenapi_gen/core/loader/schemas/extractor.py +275 -0
- pyopenapi_gen/core/pagination.py +64 -0
- pyopenapi_gen/core/parsing/__init__.py +13 -0
- pyopenapi_gen/core/parsing/common/__init__.py +1 -0
- pyopenapi_gen/core/parsing/common/ref_resolution/__init__.py +9 -0
- pyopenapi_gen/core/parsing/common/ref_resolution/helpers/__init__.py +0 -0
- pyopenapi_gen/core/parsing/common/ref_resolution/helpers/cyclic_properties.py +66 -0
- pyopenapi_gen/core/parsing/common/ref_resolution/helpers/direct_cycle.py +33 -0
- pyopenapi_gen/core/parsing/common/ref_resolution/helpers/existing_schema.py +22 -0
- pyopenapi_gen/core/parsing/common/ref_resolution/helpers/list_response.py +54 -0
- pyopenapi_gen/core/parsing/common/ref_resolution/helpers/missing_ref.py +52 -0
- pyopenapi_gen/core/parsing/common/ref_resolution/helpers/new_schema.py +50 -0
- pyopenapi_gen/core/parsing/common/ref_resolution/helpers/stripped_suffix.py +51 -0
- pyopenapi_gen/core/parsing/common/ref_resolution/resolve_schema_ref.py +86 -0
- pyopenapi_gen/core/parsing/common/type_parser.py +73 -0
- pyopenapi_gen/core/parsing/context.py +187 -0
- pyopenapi_gen/core/parsing/cycle_helpers.py +126 -0
- pyopenapi_gen/core/parsing/keywords/__init__.py +1 -0
- pyopenapi_gen/core/parsing/keywords/all_of_parser.py +81 -0
- pyopenapi_gen/core/parsing/keywords/any_of_parser.py +84 -0
- pyopenapi_gen/core/parsing/keywords/array_items_parser.py +72 -0
- pyopenapi_gen/core/parsing/keywords/one_of_parser.py +77 -0
- pyopenapi_gen/core/parsing/keywords/properties_parser.py +98 -0
- pyopenapi_gen/core/parsing/schema_finalizer.py +169 -0
- pyopenapi_gen/core/parsing/schema_parser.py +804 -0
- pyopenapi_gen/core/parsing/transformers/__init__.py +0 -0
- pyopenapi_gen/core/parsing/transformers/inline_enum_extractor.py +285 -0
- pyopenapi_gen/core/parsing/transformers/inline_object_promoter.py +120 -0
- pyopenapi_gen/core/parsing/unified_cycle_detection.py +293 -0
- pyopenapi_gen/core/postprocess_manager.py +260 -0
- pyopenapi_gen/core/spec_fetcher.py +148 -0
- pyopenapi_gen/core/streaming_helpers.py +84 -0
- pyopenapi_gen/core/telemetry.py +69 -0
- pyopenapi_gen/core/utils.py +456 -0
- pyopenapi_gen/core/warning_collector.py +83 -0
- pyopenapi_gen/core/writers/code_writer.py +135 -0
- pyopenapi_gen/core/writers/documentation_writer.py +222 -0
- pyopenapi_gen/core/writers/line_writer.py +217 -0
- pyopenapi_gen/core/writers/python_construct_renderer.py +321 -0
- pyopenapi_gen/core_package_template/README.md +21 -0
- pyopenapi_gen/emit/models_emitter.py +143 -0
- pyopenapi_gen/emitters/CLAUDE.md +286 -0
- pyopenapi_gen/emitters/client_emitter.py +51 -0
- pyopenapi_gen/emitters/core_emitter.py +181 -0
- pyopenapi_gen/emitters/docs_emitter.py +44 -0
- pyopenapi_gen/emitters/endpoints_emitter.py +247 -0
- pyopenapi_gen/emitters/exceptions_emitter.py +187 -0
- pyopenapi_gen/emitters/mocks_emitter.py +185 -0
- pyopenapi_gen/emitters/models_emitter.py +426 -0
- pyopenapi_gen/generator/CLAUDE.md +352 -0
- pyopenapi_gen/generator/client_generator.py +567 -0
- pyopenapi_gen/generator/exceptions.py +7 -0
- pyopenapi_gen/helpers/CLAUDE.md +325 -0
- pyopenapi_gen/helpers/__init__.py +1 -0
- pyopenapi_gen/helpers/endpoint_utils.py +532 -0
- pyopenapi_gen/helpers/type_cleaner.py +334 -0
- pyopenapi_gen/helpers/type_helper.py +112 -0
- pyopenapi_gen/helpers/type_resolution/__init__.py +1 -0
- pyopenapi_gen/helpers/type_resolution/array_resolver.py +57 -0
- pyopenapi_gen/helpers/type_resolution/composition_resolver.py +79 -0
- pyopenapi_gen/helpers/type_resolution/finalizer.py +105 -0
- pyopenapi_gen/helpers/type_resolution/named_resolver.py +172 -0
- pyopenapi_gen/helpers/type_resolution/object_resolver.py +216 -0
- pyopenapi_gen/helpers/type_resolution/primitive_resolver.py +109 -0
- pyopenapi_gen/helpers/type_resolution/resolver.py +47 -0
- pyopenapi_gen/helpers/url_utils.py +14 -0
- pyopenapi_gen/http_types.py +20 -0
- pyopenapi_gen/ir.py +165 -0
- pyopenapi_gen/py.typed +1 -0
- pyopenapi_gen/types/CLAUDE.md +140 -0
- pyopenapi_gen/types/__init__.py +11 -0
- pyopenapi_gen/types/contracts/__init__.py +13 -0
- pyopenapi_gen/types/contracts/protocols.py +106 -0
- pyopenapi_gen/types/contracts/types.py +28 -0
- pyopenapi_gen/types/resolvers/__init__.py +7 -0
- pyopenapi_gen/types/resolvers/reference_resolver.py +71 -0
- pyopenapi_gen/types/resolvers/response_resolver.py +177 -0
- pyopenapi_gen/types/resolvers/schema_resolver.py +498 -0
- pyopenapi_gen/types/services/__init__.py +5 -0
- pyopenapi_gen/types/services/type_service.py +165 -0
- pyopenapi_gen/types/strategies/__init__.py +5 -0
- pyopenapi_gen/types/strategies/response_strategy.py +310 -0
- pyopenapi_gen/visit/CLAUDE.md +272 -0
- pyopenapi_gen/visit/client_visitor.py +477 -0
- pyopenapi_gen/visit/docs_visitor.py +38 -0
- pyopenapi_gen/visit/endpoint/__init__.py +1 -0
- pyopenapi_gen/visit/endpoint/endpoint_visitor.py +292 -0
- pyopenapi_gen/visit/endpoint/generators/__init__.py +1 -0
- pyopenapi_gen/visit/endpoint/generators/docstring_generator.py +123 -0
- pyopenapi_gen/visit/endpoint/generators/endpoint_method_generator.py +222 -0
- pyopenapi_gen/visit/endpoint/generators/mock_generator.py +140 -0
- pyopenapi_gen/visit/endpoint/generators/overload_generator.py +252 -0
- pyopenapi_gen/visit/endpoint/generators/request_generator.py +103 -0
- pyopenapi_gen/visit/endpoint/generators/response_handler_generator.py +705 -0
- pyopenapi_gen/visit/endpoint/generators/signature_generator.py +83 -0
- pyopenapi_gen/visit/endpoint/generators/url_args_generator.py +207 -0
- pyopenapi_gen/visit/endpoint/processors/__init__.py +1 -0
- pyopenapi_gen/visit/endpoint/processors/import_analyzer.py +78 -0
- pyopenapi_gen/visit/endpoint/processors/parameter_processor.py +171 -0
- pyopenapi_gen/visit/exception_visitor.py +90 -0
- pyopenapi_gen/visit/model/__init__.py +0 -0
- pyopenapi_gen/visit/model/alias_generator.py +93 -0
- pyopenapi_gen/visit/model/dataclass_generator.py +553 -0
- pyopenapi_gen/visit/model/enum_generator.py +212 -0
- pyopenapi_gen/visit/model/model_visitor.py +198 -0
- pyopenapi_gen/visit/visitor.py +97 -0
- pyopenapi_gen-2.7.2.dist-info/METADATA +1169 -0
- pyopenapi_gen-2.7.2.dist-info/RECORD +137 -0
- pyopenapi_gen-2.7.2.dist-info/WHEEL +4 -0
- pyopenapi_gen-2.7.2.dist-info/entry_points.txt +2 -0
- pyopenapi_gen-2.7.2.dist-info/licenses/LICENSE +21 -0
|
@@ -0,0 +1,810 @@
|
|
|
1
|
+
"""
|
|
2
|
+
cattrs converter utilities for generated clients.
|
|
3
|
+
|
|
4
|
+
This module provides cattrs converter functions for JSON serialization/deserialization
|
|
5
|
+
in generated API clients. It handles:
|
|
6
|
+
- Automatic camelCase ↔ snake_case transformation
|
|
7
|
+
- Python keyword conflicts (id → id_)
|
|
8
|
+
- base64 bytes encoding/decoding
|
|
9
|
+
- Nested object structures
|
|
10
|
+
|
|
11
|
+
The converter is configured globally to handle name transformations automatically
|
|
12
|
+
for all dataclasses, with no per-class metadata required.
|
|
13
|
+
"""
|
|
14
|
+
|
|
15
|
+
from __future__ import annotations
|
|
16
|
+
|
|
17
|
+
import base64
|
|
18
|
+
import dataclasses
|
|
19
|
+
import re
|
|
20
|
+
import types
|
|
21
|
+
from datetime import date, datetime
|
|
22
|
+
from typing import Any, Callable, TypeVar, Union, get_args, get_origin, get_type_hints
|
|
23
|
+
|
|
24
|
+
import cattrs
|
|
25
|
+
from cattrs.errors import BaseValidationError, ClassValidationError, IterableValidationError
|
|
26
|
+
from cattrs.gen import make_dict_structure_fn, make_dict_unstructure_fn, override
|
|
27
|
+
|
|
28
|
+
T = TypeVar("T")
|
|
29
|
+
|
|
30
|
+
# Names the code generator emits with a trailing underscore because they
# collide with Python keywords or ubiquitous builtins (e.g. the JSON key
# "id" becomes the Python field "id_").
#
# NOTE(review): this set mixes real keywords ("class", "def", ...) with
# builtins ("id", "type") and omits several actual keywords ("in", "is",
# "and", "lambda", ...). It must mirror the generator's field-naming rules,
# so extend it only in lockstep with the generator.
PYTHON_KEYWORDS = {
    "id",
    "type",
    "class",
    "def",
    "return",
    "if",
    "elif",
    "else",
    "for",
    "while",
    "import",
    "from",
    "as",
    "pass",
    "break",
    "continue",
}


def camel_to_snake(name: str) -> str:
    """
    Convert a camelCase JSON key to a snake_case Python field name.

    Scenario:
        Convert JSON field names (camelCase) to Python field names (snake_case).

    Expected Outcome:
        Proper snake_case transformation; results that land on a reserved name
        (see PYTHON_KEYWORDS) get a trailing underscore so they are legal,
        non-shadowing identifiers.

    Examples:
        "pageSize" → "page_size"
        "totalPages" → "total_pages"
        "hasNext" → "has_next"
        "id" → "id_" (reserved name; "id" is a builtin)
    """
    # Insert an underscore at each lowercase/digit → uppercase boundary,
    # then lowercase everything. Runs of capitals ("HTTPCode") collapse to
    # a single lowercase word, which matches the generator's convention.
    snake = re.sub("([a-z0-9])([A-Z])", r"\1_\2", name)
    snake = snake.lower()

    # Suffix reserved names so the generated field name is usable in Python.
    if snake in PYTHON_KEYWORDS:
        snake = f"{snake}_"

    return snake
|
|
77
|
+
|
|
78
|
+
|
|
79
|
+
def snake_to_camel(name: str) -> str:
    """
    Convert a snake_case Python field name back to a camelCase JSON key.

    Scenario:
        Convert Python field names (snake_case) back to JSON field names (camelCase).

    Expected Outcome:
        Proper camelCase transformation; a trailing underscore that was added
        for a reserved name (see PYTHON_KEYWORDS) is stripped first, making
        this the inverse of camel_to_snake for round-tripped names.

    Examples:
        "page_size" → "pageSize"
        "total_pages" → "totalPages"
        "has_next" → "hasNext"
        "id_" → "id" (remove trailing underscore added for the reserved name)
    """
    # Undo the reserved-name suffix added by camel_to_snake.
    if name.endswith("_") and name[:-1] in PYTHON_KEYWORDS:
        name = name[:-1]

    # First component stays lowercase; each following component is capitalised.
    components = name.split("_")
    return components[0] + "".join(x.title() for x in components[1:])
|
|
103
|
+
|
|
104
|
+
|
|
105
|
+
# Global converter instance with automatic name transformation.
# Every structure/unstructure hook in this module is registered on this one
# shared instance, which generated clients import and use directly.
converter = cattrs.Converter()
|
|
107
|
+
|
|
108
|
+
|
|
109
|
+
def _make_dataclass_structure_fn(cls: type[T]) -> Any:
    """
    Build a cattrs structure function for ``cls`` with field-name mapping.

    Scenario:
        When structuring JSON into a dataclass, JSON keys may differ from
        Python field names. Explicit mappings come from an optional
        ``Meta.key_transform_with_load`` dict of ``{json_key: python_field}``.

    Expected Outcome:
        A structure function usable by cattrs that renames only the fields
        that have an explicit mapping; all other fields keep their Python
        names unchanged (no implicit camelCase assumption).
    """
    renames: dict[str, Any] = {}
    if dataclasses.is_dataclass(cls):
        meta = getattr(cls, "Meta", None)
        load_map: dict[str, str] = getattr(meta, "key_transform_with_load", {}) or {}

        # Invert {json_key: python_field} → {python_field: json_key} once,
        # keeping the FIRST json key seen for a field (matches the original
        # first-match scan order).
        by_field: dict[str, str] = {}
        for json_key, py_field in load_map.items():
            by_field.setdefault(py_field, json_key)

        for field in dataclasses.fields(cls):
            mapped = by_field.get(field.name, field.name)
            # Register an override only when the JSON key actually differs.
            if mapped != field.name:
                renames[field.name] = override(rename=mapped)

    return make_dict_structure_fn(cls, converter, **renames)
|
|
147
|
+
|
|
148
|
+
|
|
149
|
+
def _make_dataclass_unstructure_fn(cls: type[T]) -> Any:
    """
    Build a cattrs unstructure function for ``cls`` with field-name mapping.

    Scenario:
        When dumping a dataclass to JSON, Python field names may need to be
        rewritten to their JSON keys. Explicit mappings come from an optional
        ``Meta.key_transform_with_dump`` dict of ``{python_field: json_key}``.

    Expected Outcome:
        An unstructure function usable by cattrs that renames only fields
        with an explicit mapping; unmapped fields are emitted under their
        Python names as-is (no implicit camelCase assumption).
    """
    renames: dict[str, Any] = {}
    if dataclasses.is_dataclass(cls):
        meta = getattr(cls, "Meta", None)
        dump_map: dict[str, str] = getattr(meta, "key_transform_with_dump", {}) or {}

        for field in dataclasses.fields(cls):
            target_key = dump_map.get(field.name, field.name)
            # Only fields whose JSON key differs need an override.
            if target_key != field.name:
                renames[field.name] = override(rename=target_key)

    return make_dict_unstructure_fn(cls, converter, **renames)
|
|
181
|
+
|
|
182
|
+
|
|
183
|
+
def structure_with_base64_bytes(data: str | bytes, _: type[bytes]) -> bytes:
    """
    cattrs structure hook for OpenAPI ``format: byte`` fields.

    Strings are treated as base64 and decoded; raw bytes pass through
    untouched.

    Args:
        data: Either a base64 string or raw bytes
        _: Target type (bytes), required by the cattrs hook signature

    Returns:
        Decoded bytes
    """
    return base64.b64decode(data) if isinstance(data, str) else data
|
|
199
|
+
|
|
200
|
+
|
|
201
|
+
def unstructure_bytes_to_base64(data: bytes) -> str:
    """
    cattrs unstructure hook turning raw bytes into a base64 string.

    Args:
        data: Raw bytes

    Returns:
        base64-encoded ASCII string suitable for JSON
    """
    encoded = base64.b64encode(data)
    return encoded.decode("utf-8")
|
|
212
|
+
|
|
213
|
+
|
|
214
|
+
# Register base64 bytes handling
# Hooks every ``bytes`` field globally: JSON carries base64 strings
# (OpenAPI ``format: byte``), Python carries raw bytes.
converter.register_structure_hook(bytes, structure_with_base64_bytes)
converter.register_unstructure_hook(bytes, unstructure_bytes_to_base64)
|
|
217
|
+
|
|
218
|
+
|
|
219
|
+
def structure_datetime(data: str | datetime, _: type[datetime]) -> datetime:
    """
    cattrs structure hook for OpenAPI ``format: date-time`` fields.

    Args:
        data: Either an ISO 8601 string or an already-constructed datetime
        _: Target type (datetime), required by the cattrs hook signature

    Returns:
        datetime object

    Raises:
        ValueError: If the string is not valid ISO 8601
        TypeError: If data is neither a string nor a datetime
    """
    if isinstance(data, datetime):
        return data
    if not isinstance(data, str):
        raise TypeError(f"Cannot convert {type(data)} to datetime")
    try:
        # Normalise a trailing "Z" (UTC) to an explicit offset, since
        # fromisoformat does not accept "Z" on older Python versions.
        return datetime.fromisoformat(data.replace("Z", "+00:00"))
    except ValueError:
        # Fall back to parsing the string exactly as given.
        return datetime.fromisoformat(data)
|
|
245
|
+
|
|
246
|
+
|
|
247
|
+
def unstructure_datetime(data: datetime) -> str:
    """
    cattrs unstructure hook turning a datetime into an ISO 8601 string.

    Args:
        data: datetime object

    Returns:
        ISO 8601 formatted string
    """
    iso_string = data.isoformat()
    return iso_string
|
|
258
|
+
|
|
259
|
+
|
|
260
|
+
def structure_date(data: str | date, _: type[date]) -> date:
    """
    Structure hook for date fields.

    Handles OpenAPI format "date" which is ISO 8601 date string.

    Args:
        data: An ISO 8601 date string, a date object, or a datetime
            (narrowed to its date component)
        _: Target type (date), required by the cattrs hook signature

    Returns:
        date object

    Raises:
        ValueError: If string is not valid ISO 8601 date format
        TypeError: If data is neither a string nor a date/datetime
    """
    # datetime is a subclass of date, so it must be checked FIRST;
    # otherwise a datetime input would pass the date check below and be
    # returned un-narrowed, violating the declared `date` return type.
    if isinstance(data, datetime):
        return data.date()
    if isinstance(data, date):
        return data
    if isinstance(data, str):
        return date.fromisoformat(data)
    raise TypeError(f"Cannot convert {type(data)} to date")
|
|
281
|
+
|
|
282
|
+
|
|
283
|
+
def unstructure_date(data: date) -> str:
    """
    cattrs unstructure hook turning a date into an ISO 8601 string.

    Args:
        data: date object

    Returns:
        ISO 8601 formatted date string (YYYY-MM-DD)
    """
    formatted = data.isoformat()
    return formatted
|
|
294
|
+
|
|
295
|
+
|
|
296
|
+
# Register datetime and date handling
# OpenAPI "date-time"/"date" fields travel as ISO 8601 strings in JSON and
# as datetime/date objects in Python; these hooks convert in both directions.
converter.register_structure_hook(datetime, structure_datetime)
converter.register_unstructure_hook(datetime, unstructure_datetime)
converter.register_structure_hook(date, structure_date)
converter.register_unstructure_hook(date, unstructure_date)
|
|
301
|
+
|
|
302
|
+
|
|
303
|
+
# =============================================================================
|
|
304
|
+
# Union Type Structure Hook
|
|
305
|
+
# =============================================================================
|
|
306
|
+
|
|
307
|
+
|
|
308
|
+
def _is_union_type(t: Any) -> bool:
|
|
309
|
+
"""
|
|
310
|
+
Check if a type is a Union type.
|
|
311
|
+
|
|
312
|
+
Scenario:
|
|
313
|
+
Detect Union types including both typing.Union and Python 3.10+ X | Y syntax.
|
|
314
|
+
|
|
315
|
+
Expected Outcome:
|
|
316
|
+
Returns True for Union types, False otherwise.
|
|
317
|
+
"""
|
|
318
|
+
origin = get_origin(t)
|
|
319
|
+
# Handle both typing.Union and types.UnionType (Python 3.10+ X | Y syntax)
|
|
320
|
+
return origin is Union or (hasattr(types, "UnionType") and isinstance(t, types.UnionType))
|
|
321
|
+
|
|
322
|
+
|
|
323
|
+
def _truncate_data_repr(data: Any, max_length: int = 200) -> str:
|
|
324
|
+
"""
|
|
325
|
+
Create a truncated string representation of data for error messages.
|
|
326
|
+
|
|
327
|
+
Args:
|
|
328
|
+
data: The data to represent
|
|
329
|
+
max_length: Maximum length of the output string
|
|
330
|
+
|
|
331
|
+
Returns:
|
|
332
|
+
A string representation, truncated if necessary
|
|
333
|
+
"""
|
|
334
|
+
try:
|
|
335
|
+
repr_str = repr(data)
|
|
336
|
+
if len(repr_str) <= max_length:
|
|
337
|
+
return repr_str
|
|
338
|
+
return repr_str[: max_length - 3] + "..."
|
|
339
|
+
except Exception:
|
|
340
|
+
return f"<{type(data).__name__}: repr failed>"
|
|
341
|
+
|
|
342
|
+
|
|
343
|
+
def _structure_union(data: Any, union_type: type) -> Any:
    """
    Structure a Union type by trying each variant.

    Scenario:
        OpenAPI oneOf/anyOf schemas generate Union types. When deserialising
        API responses, we need to determine which variant matches the data.

    Expected Outcome:
        Returns the structured data as the first matching Union variant.

    Strategy:
        1. If data is None and NoneType is in the union, return None
        2. If data is a dict, try each dataclass variant
        3. Try structuring with other variants (generic types, registered hooks)
        4. Fall back to dict[str, Any] if present
        5. Raise error if no variant matches

    Args:
        data: The raw data to structure
        union_type: The Union type to structure into

    Returns:
        Structured data as one of the Union variants

    Raises:
        TypeError: If data is None but NoneType not in union
        ValueError: If no Union variant matches the data
    """
    args = get_args(union_type)

    # Handle None explicitly
    if data is None:
        if type(None) in args:
            return None
        raise TypeError(f"None is not valid for {union_type}")

    # Separate variants by type category.
    # NOTE: "first match wins" — variant order within each category follows
    # the declaration order of the Union's arguments.
    dataclass_variants: list[type[Any]] = []
    dict_any_fallback = False
    other_variants: list[Any] = []  # Can include generic types like List[T]

    for arg in args:
        if arg is type(None):
            continue
        elif isinstance(arg, type) and dataclasses.is_dataclass(arg):
            dataclass_variants.append(arg)
        elif get_origin(arg) is dict:
            # Check if it's dict[str, Any] - our fallback type
            dict_args = get_args(arg)
            if dict_args == (str, Any):
                dict_any_fallback = True
            else:
                # Other dict types should be tried as variants
                other_variants.append(arg)
        else:
            # Includes plain types (str, int, datetime) and generic types (List[T])
            other_variants.append(arg)

    # If data is a dict, try dataclass variants first
    if isinstance(data, dict):
        errors: list[tuple[str, str]] = []
        for variant in dataclass_variants:
            try:
                # Ensure hooks are registered for this variant
                _register_structure_hooks_recursively(variant)
                return converter.structure(data, variant)
            except Exception as e:
                # Collect the failure and keep trying the next variant.
                errors.append((variant.__name__, str(e)))
                continue

        # If no dataclass matched and dict fallback is available, return raw dict
        if dict_any_fallback:
            return data

        # All variants failed - provide helpful error with data preview
        if errors:
            data_preview = _truncate_data_repr(data)
            error_details = "\n".join(f" - {name}: {err}" for name, err in errors)
            raise ValueError(
                f"Could not structure dict into any variant of {union_type}.\n"
                f"Data: {data_preview}\n"
                f"Tried variants:\n{error_details}"
            )

    # Try other variants using converter.structure (accumulate errors for debugging)
    # This handles:
    # - Types with registered hooks (datetime, date, bytes, etc.)
    # - Generic types (List[T], Dict[K,V], etc.)
    # - Plain types (str, int, etc.)
    other_errors: list[tuple[str, str]] = []
    for variant in other_variants:
        try:
            return converter.structure(data, variant)
        except Exception as e:  # nosec B112 - intentional: trying variants until one succeeds
            variant_name = getattr(variant, "__name__", str(variant))
            other_errors.append((variant_name, str(e)))
            continue

    # Last resort: if dict fallback is available and we have dict data
    # (reached when the dict had no dataclass variants to try above).
    if dict_any_fallback and isinstance(data, dict):
        return data

    # Include data preview in error message for debugging
    data_preview = _truncate_data_repr(data)
    if other_errors:
        error_details = "\n".join(f" - {name}: {err}" for name, err in other_errors)
        raise TypeError(
            f"Cannot structure {type(data).__name__} into {union_type}.\n"
            f"Data: {data_preview}\n"
            f"Tried variants:\n{error_details}"
        )

    raise TypeError(
        f"Cannot structure {type(data).__name__} into {union_type}.\n"
        f"Data: {data_preview}\n"
        f"Expected one of: {[arg for arg in args if arg is not type(None)]}"
    )
|
|
461
|
+
|
|
462
|
+
|
|
463
|
+
def _union_structure_hook(data: Any, union_type: type) -> Any:
    """
    cattrs entry point for structuring Union types.

    Thin adapter registered via ``register_structure_hook_func``; all of the
    variant-matching logic lives in ``_structure_union``.

    Args:
        data: The raw data to structure
        union_type: The Union type to structure into

    Returns:
        Structured data as one of the Union variants
    """
    result = _structure_union(data, union_type)
    return result
|
|
478
|
+
|
|
479
|
+
|
|
480
|
+
def _register_union_structure_hook() -> None:
    """
    Register the Union structure hook on the global converter.

    Scenario:
        cattrs doesn't natively handle Union types containing dataclasses
        (OpenAPI oneOf/anyOf), so every type matching ``_is_union_type`` is
        routed through ``_union_structure_hook``.

    Expected Outcome:
        All Union types can be structured correctly via the shared converter.
    """
    # _is_union_type already has the exact predicate signature cattrs wants,
    # so it is passed directly rather than through a wrapper.
    converter.register_structure_hook_func(_is_union_type, _union_structure_hook)
|
|
496
|
+
|
|
497
|
+
|
|
498
|
+
# Register the Union hook at module load time
# (import side effect: the shared ``converter`` gains Union support as soon
# as this module is imported).
_register_union_structure_hook()
|
|
500
|
+
|
|
501
|
+
|
|
502
|
+
def _register_structure_hooks_recursively(cls: type[Any], visited: set[type[Any]] | None = None) -> None:
    """
    Recursively register structure hooks for a dataclass and all its nested dataclass types.

    Scenario:
        Before structuring a dataclass, we need to register hooks for it and all
        nested data classes so that field name transformation works at all levels.

    Expected Outcome:
        All dataclass types in the object graph have structure hooks registered.

    Args:
        cls: The dataclass type to register hooks for
        visited: Set of already-visited types to avoid infinite recursion
            (a fresh set is created per top-level call when omitted)
    """
    if visited is None:
        visited = set()

    # Skip if already visited (avoid infinite recursion)
    if cls in visited:
        return

    visited.add(cls)

    # Only process dataclasses
    if not dataclasses.is_dataclass(cls):
        return

    # Register structure hook for this dataclass.
    # NOTE(review): this registers a new hook on every top-level call for the
    # same class (cattrs does not deduplicate hook_func registrations) —
    # presumably harmless since the hooks are equivalent, but worth confirming
    # against cattrs' hook-resolution caching behaviour.
    try:
        # Use closure to capture cls value
        def make_hook(captured_cls: type[Any]) -> Any:
            def hook(d: dict[str, Any] | None, t: type[Any]) -> Any:
                # Handle None input - cattrs passes None when JSON has null values
                # for non-optional dataclass fields. This prevents TypeError when
                # the generated structure function tries to check field presence
                # using 'field_name' in d (which fails when d is None).
                if d is None:
                    # None received for a non-optional field is a schema violation.
                    # This typically happens when:
                    # 1. OpenAPI schema marks field as required but API returns null
                    # 2. OpenAPI schema is missing 'nullable: true' for the field
                    raise TypeError(
                        f"Cannot structure None into {captured_cls.__name__}: "
                        f"Received null value for non-optional field. "
                        f"This is likely a schema mismatch - either the API is returning null "
                        f"for a required field, or the OpenAPI schema is missing 'nullable: true'. "
                        f"To fix: make the field optional in the OpenAPI spec by adding 'nullable: true' "
                        f"or removing it from the 'required' array."
                    )
                return _make_dataclass_structure_fn(captured_cls)(d, t)

            return hook

        # Default-argument binding pins `cls` at definition time (avoids the
        # late-binding-closure pitfall if this ever runs in a loop).
        def predicate(t: type[Any], captured_cls: type[Any] = cls) -> bool:
            return t is captured_cls

        converter.register_structure_hook_func(
            predicate,
            make_hook(cls),
        )
    except Exception:  # nosec B110
        # Hook might already be registered - this is expected and safe to ignore
        pass

    # Recursively register hooks for nested dataclass fields
    try:
        type_hints = get_type_hints(cls)
    except Exception:
        # If type hints cannot be resolved (e.g. missing imports), fall back to field.type
        type_hints = {}

    for field in dataclasses.fields(cls):
        # Use resolved type hint if available, otherwise raw field type
        # (field.type may be an unevaluated string under PEP 563).
        field_type = type_hints.get(field.name, field.type)

        # Handle direct dataclass types
        if isinstance(field_type, type) and dataclasses.is_dataclass(field_type):
            _register_structure_hooks_recursively(field_type, visited)
            continue

        # Handle generic types (List[T], Optional[T], etc.) and Unions
        _register_hooks_for_nested_types(field_type, visited, _register_structure_hooks_recursively)
|
|
585
|
+
|
|
586
|
+
|
|
587
|
+
def _register_hooks_for_nested_types(
|
|
588
|
+
type_hint: Any, visited: set[type], registrar: Callable[[type, set[type]], None]
|
|
589
|
+
) -> None:
|
|
590
|
+
"""
|
|
591
|
+
Recursively inspect a type hint to find and register hooks for nested dataclasses.
|
|
592
|
+
Handles Unions, Lists, Optionals, and other generic types.
|
|
593
|
+
"""
|
|
594
|
+
from typing import get_args, get_origin
|
|
595
|
+
|
|
596
|
+
# If it's a direct dataclass, register it
|
|
597
|
+
if isinstance(type_hint, type) and dataclasses.is_dataclass(type_hint):
|
|
598
|
+
registrar(type_hint, visited)
|
|
599
|
+
return
|
|
600
|
+
|
|
601
|
+
# If it's a generic type or Union, inspect its arguments
|
|
602
|
+
origin = get_origin(type_hint)
|
|
603
|
+
if origin is not None:
|
|
604
|
+
for arg in get_args(type_hint):
|
|
605
|
+
_register_hooks_for_nested_types(arg, visited, registrar)
|
|
606
|
+
|
|
607
|
+
|
|
608
|
+
def _extract_errors(e: BaseException | Exception, path: str = "") -> list[str]:
    """
    Flatten a (possibly nested) cattrs validation error into readable messages.

    Args:
        e: The exception to unpack.
        path: Dotted/bracketed location of the value being structured
            (e.g. ``"items[].id"``).

    Returns:
        One formatted message per leaf exception found.
    """
    collected: list[str] = []

    if isinstance(e, IterableValidationError):
        # cattrs reports only the failing exceptions for an iterable, not
        # their indices, so "[]" marks "some element" without inventing a
        # misleading index.
        element_path = f"{path}[]" if path else "[]"
        for inner in e.exceptions:
            collected.extend(_extract_errors(inner, element_path))
        return collected

    if isinstance(e, ClassValidationError):
        for inner in e.exceptions:
            # cattrs attaches notes shaped like
            # "Structuring class {ClassName} @ attribute {AttributeName}";
            # mine them to recover which field the sub-error belongs to.
            attr = None
            for note in getattr(inner, "__notes__", []):
                found = re.search(r"Structuring class .* @ attribute (.*)", note)
                if found:
                    attr = found.group(1)
                    break

            if attr:
                next_path = f"{path}.{attr}" if path else attr
            else:
                next_path = path
            collected.extend(_extract_errors(inner, next_path))
        return collected

    if isinstance(e, ExceptionGroup):
        # Python 3.11+ ExceptionGroup (if cattrs uses it): flatten every
        # member at the same path.
        for inner in e.exceptions:
            collected.extend(_extract_errors(inner, path))
        return collected

    # Leaf exception (ValueError, TypeError, etc.): format and stop.
    text = str(e)
    collected.append(f"{path}: {text}" if path else text)
    return collected
|
|
664
|
+
|
|
665
|
+
|
|
666
|
+
def structure_from_dict(data: dict[str, Any], cls: type[T]) -> T:
    """
    Build a ``cls`` instance from JSON-style dict data.

    Scenario:
        JSON payloads arrive with camelCase keys while the generated
        dataclasses use snake_case fields; this converts between the two,
        recursing through nested dataclasses and containers.

    Expected Outcome:
        A fully structured ``cls`` instance with every field name mapped
        automatically, including nested objects and lists.

    Args:
        data: Dictionary data (from JSON)
        cls: Target dataclass type

    Returns:
        Instance of cls

    Raises:
        ValueError: When the data cannot be structured into ``cls``.
    """
    # Make sure cls and every nested dataclass have structure hooks in place.
    if dataclasses.is_dataclass(cls):
        _register_structure_hooks_recursively(cls)

    try:
        return converter.structure(data, cls)
    except BaseValidationError as e:
        # Turn cattrs' nested validation errors into a readable bullet list.
        error_text = "\n".join(f"- {msg}" for msg in _extract_errors(e))
        raise ValueError(f"Failed to convert data to {cls.__name__}:\n{error_text}") from e
    except Exception as e:
        # Anything non-cattrs still surfaces as a ValueError for callers.
        raise ValueError(f"Failed to convert data to {cls.__name__}: {e}") from e
|
|
699
|
+
|
|
700
|
+
|
|
701
|
+
def _register_unstructure_hooks_recursively(cls: type[Any], visited: set[type[Any]] | None = None) -> None:
    """
    Register unstructure hooks for ``cls`` and every dataclass reachable from it.

    Scenario:
        Serialising a dataclass requires field-name transformation at every
        nesting level, so each dataclass in the object graph needs its own
        unstructure hook before conversion starts.

    Expected Outcome:
        ``cls`` and all nested dataclass field types have unstructure hooks
        registered on the shared converter.

    Args:
        cls: The dataclass type to register hooks for
        visited: Set of already-visited types to avoid infinite recursion
    """
    if visited is None:
        visited = set()

    # Already handled — guards against cyclic type graphs.
    if cls in visited:
        return
    visited.add(cls)

    # Non-dataclasses have nothing to register.
    if not dataclasses.is_dataclass(cls):
        return

    try:
        def _build_hook(captured_cls: type[Any]) -> Any:
            # The unstructure fn is built lazily on each call so that hooks
            # registered later (e.g. for nested types) are picked up.
            def hook(obj: Any) -> Any:
                return _make_dataclass_unstructure_fn(captured_cls)(obj)

            return hook

        def _matches(t: type[Any], captured_cls: type[Any] = cls) -> bool:
            # Default-arg binding captures cls at definition time.
            return t is captured_cls

        converter.register_unstructure_hook_func(_matches, _build_hook(cls))
    except Exception:  # nosec B110
        # Hook might already be registered - this is expected and safe to ignore
        pass

    # Resolve annotations once; fall back to the raw field.type strings when
    # the module's type hints cannot be evaluated (e.g. missing imports).
    try:
        type_hints = get_type_hints(cls)
    except Exception:
        type_hints = {}

    for field in dataclasses.fields(cls):
        field_type = type_hints.get(field.name, field.type)

        if isinstance(field_type, type) and dataclasses.is_dataclass(field_type):
            # Direct dataclass field: recurse into it.
            _register_unstructure_hooks_recursively(field_type, visited)
        else:
            # Generic containers and unions: scan their type arguments.
            _register_hooks_for_nested_types(field_type, visited, _register_unstructure_hooks_recursively)
|
|
768
|
+
|
|
769
|
+
|
|
770
|
+
def unstructure_to_dict(instance: Any) -> dict[str, Any]:
    """
    Serialise a dataclass instance to a JSON-ready dict.

    Scenario:
        Python dataclasses use snake_case fields while the wire format
        expects camelCase keys; this converts the instance (and any nested
        dataclasses or lists) with field names transformed automatically.

    Expected Outcome:
        Dictionary with all field names transformed to match the JSON
        format, including nested objects and lists.

    Args:
        instance: Dataclass instance

    Returns:
        Dictionary representation
    """
    instance_cls = type(instance)

    # Hooks must exist for the instance's class and every nested dataclass
    # before the converter runs.
    if dataclasses.is_dataclass(instance_cls):
        _register_unstructure_hooks_recursively(instance_cls)

    unstructured: dict[str, Any] = converter.unstructure(instance)
    return unstructured
|
|
796
|
+
|
|
797
|
+
|
|
798
|
+
# Public API of this module: the shared converter plus the structure/
# unstructure entry points and the field-name/value transformation helpers.
__all__ = [
    "converter",
    "structure_from_dict",
    "unstructure_to_dict",
    "structure_with_base64_bytes",
    "unstructure_bytes_to_base64",
    "structure_datetime",
    "unstructure_datetime",
    "structure_date",
    "unstructure_date",
    "camel_to_snake",
    "snake_to_camel",
]
|