ab-openapi-python-generator 2.1.4.dev1768280320__py3-none-any.whl → 2.2.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ab_openapi_python_generator/__init__.py +14 -10
- ab_openapi_python_generator/__main__.py +85 -0
- ab_openapi_python_generator/common.py +58 -0
- ab_openapi_python_generator/generate_data.py +235 -0
- ab_openapi_python_generator/language_converters/__init__.py +0 -0
- ab_openapi_python_generator/language_converters/python/__init__.py +0 -0
- ab_openapi_python_generator/language_converters/python/client_generator.py +450 -0
- ab_openapi_python_generator/language_converters/python/common.py +58 -0
- ab_openapi_python_generator/language_converters/python/exception_generator.py +23 -0
- ab_openapi_python_generator/language_converters/python/generator.py +52 -0
- ab_openapi_python_generator/language_converters/python/jinja_config.py +38 -0
- ab_openapi_python_generator/language_converters/python/model_generator.py +838 -0
- ab_openapi_python_generator/language_converters/python/templates/alias_union.jinja2 +17 -0
- ab_openapi_python_generator/language_converters/python/templates/async_client_httpx_pydantic_2.jinja2 +80 -0
- ab_openapi_python_generator/language_converters/python/templates/discriminator_enum.jinja2 +7 -0
- ab_openapi_python_generator/language_converters/python/templates/enum.jinja2 +11 -0
- ab_openapi_python_generator/language_converters/python/templates/http_exception.jinja2 +8 -0
- ab_openapi_python_generator/language_converters/python/templates/models.jinja2 +24 -0
- ab_openapi_python_generator/language_converters/python/templates/models_pydantic_2.jinja2 +28 -0
- ab_openapi_python_generator/language_converters/python/templates/sync_client_httpx_pydantic_2.jinja2 +80 -0
- ab_openapi_python_generator/models.py +101 -0
- ab_openapi_python_generator/parsers/__init__.py +13 -0
- ab_openapi_python_generator/parsers/openapi_30.py +65 -0
- ab_openapi_python_generator/parsers/openapi_31.py +65 -0
- ab_openapi_python_generator/py.typed +0 -0
- ab_openapi_python_generator/version_detector.py +67 -0
- {ab_openapi_python_generator-2.1.4.dev1768280320.dist-info → ab_openapi_python_generator-2.2.1.dist-info}/METADATA +21 -27
- ab_openapi_python_generator-2.2.1.dist-info/RECORD +31 -0
- {ab_openapi_python_generator-2.1.4.dev1768280320.dist-info → ab_openapi_python_generator-2.2.1.dist-info}/WHEEL +1 -1
- ab_openapi_python_generator-2.2.1.dist-info/entry_points.txt +2 -0
- ab_openapi_python_generator-2.1.4.dev1768280320.dist-info/RECORD +0 -6
- ab_openapi_python_generator-2.1.4.dev1768280320.dist-info/entry_points.txt +0 -3
- {ab_openapi_python_generator-2.1.4.dev1768280320.dist-info → ab_openapi_python_generator-2.2.1.dist-info/licenses}/LICENSE +0 -0
|
@@ -0,0 +1,838 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import itertools
|
|
4
|
+
import re
|
|
5
|
+
from dataclasses import dataclass
|
|
6
|
+
from typing import Dict, List, Optional, Set, Tuple, Union
|
|
7
|
+
|
|
8
|
+
import click
|
|
9
|
+
from openapi_pydantic.v3.v3_0 import (
|
|
10
|
+
Components as Components30,
|
|
11
|
+
)
|
|
12
|
+
from openapi_pydantic.v3.v3_0 import (
|
|
13
|
+
Reference as Reference30,
|
|
14
|
+
)
|
|
15
|
+
from openapi_pydantic.v3.v3_0 import (
|
|
16
|
+
Schema as Schema30,
|
|
17
|
+
)
|
|
18
|
+
from openapi_pydantic.v3.v3_1 import (
|
|
19
|
+
Components as Components31,
|
|
20
|
+
)
|
|
21
|
+
from openapi_pydantic.v3.v3_1 import (
|
|
22
|
+
Reference as Reference31,
|
|
23
|
+
)
|
|
24
|
+
from openapi_pydantic.v3.v3_1 import (
|
|
25
|
+
Schema as Schema31,
|
|
26
|
+
)
|
|
27
|
+
|
|
28
|
+
from ab_openapi_python_generator.common import PydanticVersion
|
|
29
|
+
from ab_openapi_python_generator.language_converters.python import common
|
|
30
|
+
from ab_openapi_python_generator.language_converters.python.jinja_config import (
|
|
31
|
+
ALIAS_UNION_TEMPLATE,
|
|
32
|
+
DISCRIMINATOR_ENUM_TEMPLATE,
|
|
33
|
+
ENUM_TEMPLATE,
|
|
34
|
+
MODELS_TEMPLATE,
|
|
35
|
+
MODELS_TEMPLATE_PYDANTIC_V2,
|
|
36
|
+
create_jinja_env,
|
|
37
|
+
)
|
|
38
|
+
from ab_openapi_python_generator.models import Model, Property, TypeConversion
|
|
39
|
+
|
|
40
|
+
# Type aliases for compatibility: each alias accepts either the OpenAPI 3.0 or
# the OpenAPI 3.1 flavour of the corresponding openapi-pydantic model.
Schema = Union[Schema30, Schema31]
Reference = Union[Reference30, Reference31]
Components = Union[Components30, Components31]


# Map of wrapper component name -> TypeConversion to use instead of generating wrapper module.
# NOTE(review): module-level mutable state; generate_models() rebinds it (via `global`)
# for every run — presumably generation is single-threaded, confirm before reusing concurrently.
_REFERENCE_TYPE_OVERRIDES: dict[str, TypeConversion] = {}
|
|
48
|
+
|
|
49
|
+
|
|
50
|
+
def _is_null_schema(s: object) -> bool:
|
|
51
|
+
t = getattr(s, "type", None)
|
|
52
|
+
return t == "null" or str(t) == "DataType.NULL"
|
|
53
|
+
|
|
54
|
+
|
|
55
|
+
def _build_nullable_wrapper_overrides(components: Components) -> dict[str, TypeConversion]:
    """
    Collapse component schemas shaped like:
        X = anyOf/oneOf: [ $ref: Y, {type: null} ]
    into an override so refs to X become Optional[Y] without generating X.py.

    :param components: Parsed OpenAPI components (3.0 or 3.1 flavour).
    :return: Mapping of normalized wrapper name -> TypeConversion replacement.
    """
    overrides: dict[str, TypeConversion] = {}
    schemas = getattr(components, "schemas", None)
    if not schemas:
        return overrides

    for schema_name, schema in schemas.items():
        # Only non-discriminator wrappers: discriminated unions are handled elsewhere.
        if getattr(schema, "discriminator", None) is not None:
            continue

        # The wrapper shape is exactly two variants: one $ref and one null schema.
        variants = getattr(schema, "anyOf", None) or getattr(schema, "oneOf", None)
        if not variants or len(variants) != 2:
            continue

        ref = next((v for v in variants if isinstance(v, (Reference30, Reference31))), None)
        nul = next((v for v in variants if isinstance(v, (Schema30, Schema31)) and _is_null_schema(v)), None)
        if ref is None or nul is None:
            continue

        # Key on the normalized wrapper name; refs resolve to the last path segment.
        wrapper_name = common.normalize_symbol(schema_name)
        target_model = common.normalize_symbol(ref.ref.split("/")[-1])

        overrides[wrapper_name] = TypeConversion(
            original_type=ref.ref,
            converted_type=f"Optional[{target_model}]",
            import_types=[f"from .{target_model} import {target_model}"],
        )

    return overrides
|
|
90
|
+
|
|
91
|
+
|
|
92
|
+
def _get_discriminator_key(schema: Schema) -> Optional[str]:
|
|
93
|
+
"""
|
|
94
|
+
Return discriminator property name if present on the schema.
|
|
95
|
+
openapi-pydantic v3.x uses `schema.discriminator.propertyName` (common),
|
|
96
|
+
but we defensively check a couple of variants.
|
|
97
|
+
"""
|
|
98
|
+
disc = getattr(schema, "discriminator", None)
|
|
99
|
+
if disc is None:
|
|
100
|
+
return None
|
|
101
|
+
|
|
102
|
+
# Most common: propertyName
|
|
103
|
+
key = getattr(disc, "propertyName", None)
|
|
104
|
+
if key:
|
|
105
|
+
return str(key)
|
|
106
|
+
|
|
107
|
+
# Defensive fallbacks
|
|
108
|
+
key = getattr(disc, "property_name", None)
|
|
109
|
+
if key:
|
|
110
|
+
return str(key)
|
|
111
|
+
|
|
112
|
+
return None
|
|
113
|
+
|
|
114
|
+
|
|
115
|
+
def _schema_is_union(schema: Schema) -> bool:
|
|
116
|
+
used = schema.oneOf if schema.oneOf is not None else schema.anyOf
|
|
117
|
+
return used is not None and len(used) > 0
|
|
118
|
+
|
|
119
|
+
|
|
120
|
+
def _alias_name_for_property(prop_name: str) -> str:
|
|
121
|
+
# token_issuer -> TokenIssuer, foo-bar -> FooBar, etc.
|
|
122
|
+
parts = re.split(r"[^a-zA-Z0-9]+", prop_name.strip())
|
|
123
|
+
parts = [p for p in parts if p]
|
|
124
|
+
return "".join(p[:1].upper() + p[1:] for p in parts)
|
|
125
|
+
|
|
126
|
+
|
|
127
|
+
def _dedupe_imports(imports: Optional[List[str]]) -> List[str]:
|
|
128
|
+
if not imports:
|
|
129
|
+
return []
|
|
130
|
+
seen: Set[str] = set()
|
|
131
|
+
out: List[str] = []
|
|
132
|
+
for imp in imports:
|
|
133
|
+
if imp and imp not in seen:
|
|
134
|
+
seen.add(imp)
|
|
135
|
+
out.append(imp)
|
|
136
|
+
return out
|
|
137
|
+
|
|
138
|
+
|
|
139
|
+
def _render_union_alias_module(
    *,
    jinja_env,
    alias_name: str,
    union_type: str,
    discriminator_key: Optional[str],
    member_imports: List[str],
) -> str:
    """
    Render the source of a named union-alias module via the alias_union template.

    :param jinja_env: Jinja environment created by create_jinja_env().
    :param alias_name: Name of the generated alias (module/type name).
    :param union_type: The full Union[...] expression the alias stands for.
    :param discriminator_key: Discriminator property name, or None for plain unions.
    :param member_imports: Import lines for the union members (de-duplicated here).
    :return: Rendered module source as a string.
    """
    return jinja_env.get_template(ALIAS_UNION_TEMPLATE).render(
        alias_name=alias_name,
        union_type=union_type,
        discriminator_key=discriminator_key,
        member_imports=_dedupe_imports(member_imports),
    )
|
|
153
|
+
|
|
154
|
+
|
|
155
|
+
@dataclass(frozen=True)
class DiscriminatorBinding:
    """Links a union-member model to the discriminator enum generated for its union."""

    # Name of the generated discriminator enum class (e.g. "TokenIssuerType").
    enum_name: str
    # Enum member this model maps to (e.g. "PKCE").
    enum_member: str
    # Property on the member model that carries the discriminator value.
    discriminator_key: str
|
|
160
|
+
|
|
161
|
+
|
|
162
|
+
def _enum_member_name(value: str) -> str:
    """Derive a safe UPPER_CASE enum member name from a discriminator value."""
    # Make a safe enum member name, e.g. "pkce" -> "PKCE", "oauth2" -> "OAUTH2"
    return common.normalize_symbol(str(value)).upper()
|
|
165
|
+
|
|
166
|
+
|
|
167
|
+
def _pascal_discriminator(discriminator_key: str) -> str:
    """
    Convert a discriminator property name (e.g. "type", "kind", "event_type")
    into a PascalCase suffix ("Type", "Kind", "EventType").

    Falls back to "Discriminator" when nothing usable remains after normalization.
    """
    normalized = common.normalize_symbol(discriminator_key).replace("-", "_")
    pieces = [piece[:1].upper() + piece[1:] for piece in normalized.split("_") if piece]
    return "".join(pieces) or "Discriminator"
|
|
175
|
+
|
|
176
|
+
|
|
177
|
+
def _pascal_schema_name(schema_name: str) -> str:
    """
    Convert a schema name like "token_issuer" into "TokenIssuer" for generated type/enum names.
    (We don't want enum class names like `token_issuerType`.)
    """
    # Normalize first so invalid identifier characters are already handled,
    # then PascalCase each "_"/"-" separated piece.
    sym = common.normalize_symbol(schema_name)
    parts = [p for p in sym.replace("-", "_").split("_") if p]
    # Fall back to "Schema" when the name normalizes to nothing usable.
    return "".join(p[:1].upper() + p[1:] for p in parts) or "Schema"
|
|
185
|
+
|
|
186
|
+
|
|
187
|
+
def _invert_discriminator_mapping(mapping: Optional[dict]) -> Dict[str, str]:
|
|
188
|
+
"""
|
|
189
|
+
discriminator.mapping is usually { "<disc_value>": "<$ref>" }
|
|
190
|
+
Return { "<$ref>": "<disc_value>" }
|
|
191
|
+
"""
|
|
192
|
+
if not mapping:
|
|
193
|
+
return {}
|
|
194
|
+
inv: Dict[str, str] = {}
|
|
195
|
+
for k, v in mapping.items():
|
|
196
|
+
if isinstance(v, str):
|
|
197
|
+
inv[v] = str(k)
|
|
198
|
+
return inv
|
|
199
|
+
|
|
200
|
+
|
|
201
|
+
def _discover_discriminated_unions(
    components: Components,
) -> Tuple[Dict[str, DiscriminatorBinding], Dict[str, List[Tuple[str, str]]]]:
    """
    Discover discriminated unions in BOTH:
    - top-level component schemas (schema.discriminator + schema.oneOf)
    - inline/property schemas (property_schema.discriminator + property_schema.oneOf)

    Returns:
    - bindings: { "<MemberModelName>": DiscriminatorBinding(...) }
    - enum_members_by_name: { "<EnumName>": [(MEMBER_NAME, member_value), ...] }
    """
    bindings: Dict[str, DiscriminatorBinding] = {}
    enum_members_by_name: Dict[str, List[Tuple[str, str]]] = {}

    if not getattr(components, "schemas", None):
        return bindings, enum_members_by_name

    # Closure mutates `bindings` / `enum_members_by_name` in the enclosing scope.
    def register_union(alias_name: str, union_schema: Schema) -> None:
        disc = getattr(union_schema, "discriminator", None)
        one_of = getattr(union_schema, "oneOf", None)
        if disc is None or not one_of:
            return

        # openapi_pydantic uses propertyName, but be defensive
        discriminator_key = getattr(disc, "propertyName", None) or getattr(disc, "property_name", None)
        if not discriminator_key:
            return

        # Enum name combines the union name and the discriminator key, e.g. "TokenIssuerType".
        enum_name = f"{_pascal_schema_name(alias_name)}{_pascal_discriminator(discriminator_key)}"
        ref_to_value = _invert_discriminator_mapping(getattr(disc, "mapping", None))

        members: List[Tuple[str, str]] = []
        for sub in one_of:
            # Only $ref members participate; inline schemas are skipped.
            if not isinstance(sub, (Reference30, Reference31)):
                continue
            ref = sub.ref
            member_model = common.normalize_symbol(ref.split("/")[-1])
            # Prefer the value from discriminator.mapping; fall back to the model name.
            disc_value = ref_to_value.get(ref) or member_model

            member_name = _enum_member_name(disc_value)
            members.append((member_name, disc_value))

            bindings[member_model] = DiscriminatorBinding(
                enum_name=enum_name,
                enum_member=member_name,
                discriminator_key=discriminator_key,
            )

        if members:
            # de-dupe by enum member name (first occurrence wins)
            seen = set()
            deduped: List[Tuple[str, str]] = []
            for mn, mv in members:
                if mn in seen:
                    continue
                seen.add(mn)
                deduped.append((mn, mv))
            enum_members_by_name[enum_name] = deduped

    # 1) top-level discriminated unions
    for schema_name, schema in components.schemas.items():
        disc = getattr(schema, "discriminator", None)
        one_of = getattr(schema, "oneOf", None)
        if disc is not None and one_of:
            register_union(schema_name, schema)

    # 2) inline/property discriminated unions
    for _parent_name, parent_schema in components.schemas.items():
        props = getattr(parent_schema, "properties", None) or {}
        for prop_name, prop_schema in props.items():
            disc = getattr(prop_schema, "discriminator", None)
            one_of = getattr(prop_schema, "oneOf", None)
            if disc is not None and one_of:
                # alias name should be based on the property name (e.g. oauth2_client -> OAuth2Client)
                register_union(prop_name, prop_schema)

    return bindings, enum_members_by_name
|
|
279
|
+
|
|
280
|
+
|
|
281
|
+
def _build_discriminator_bindings(components: Components) -> Dict[str, DiscriminatorBinding]:
    """
    Scan components.schemas for discriminator-based oneOf schemas and return:
    { "<MemberModelName>": DiscriminatorBinding(...) }

    We use:
    - discriminator.propertyName as the key
    - discriminator.mapping (preferred) to get per-member discriminator values
    - fallback: schema name when mapping not present

    NOTE(review): this only covers top-level schemas; _discover_discriminated_unions
    in this module also handles property-level unions. Looks superseded — confirm
    callers before removing.
    """
    bindings: Dict[str, DiscriminatorBinding] = {}

    if not getattr(components, "schemas", None):
        return bindings

    for schema_name, schema in components.schemas.items():
        disc = getattr(schema, "discriminator", None)
        one_of = getattr(schema, "oneOf", None)

        # Only discriminated oneOf unions are of interest here.
        if disc is None or not one_of:
            continue

        discriminator_key = getattr(disc, "propertyName", None) or getattr(disc, "property_name", None)
        if discriminator_key is None:
            continue

        # e.g. schema "token_issuer" + key "type" -> enum "TokenIssuerType"
        enum_name = f"{_pascal_schema_name(schema_name)}{_pascal_discriminator(discriminator_key)}"

        mapping = getattr(disc, "mapping", None) or {}
        # invert mapping to get $ref -> value
        ref_to_value: Dict[str, str] = {ref: value for value, ref in mapping.items()}

        for sub in one_of:
            # Inline (non-$ref) members are skipped.
            if not (isinstance(sub, Reference30) or isinstance(sub, Reference31)):
                continue

            ref = sub.ref
            member_model = common.normalize_symbol(ref.split("/")[-1])

            # Fall back to the member model name when mapping has no entry.
            disc_value = ref_to_value.get(ref)
            if disc_value is None:
                disc_value = member_model

            bindings[member_model] = DiscriminatorBinding(
                enum_name=enum_name,
                enum_member=_enum_member_name(disc_value),
                discriminator_key=discriminator_key,
            )

    return bindings
|
|
331
|
+
|
|
332
|
+
|
|
333
|
+
def type_converter(  # noqa: C901
    schema: Union[Schema, Reference],
    required: bool = False,
    model_name: Optional[str] = None,
) -> TypeConversion:
    """
    Converts an OpenAPI type to a Python type.
    :param schema: Schema or Reference containing the type to be converted
    :param model_name: Name of the original model on which the type is defined
    :param required: Flag indicating if the type is required by the class
    :return: The converted type
    """
    # Handle Reference objects by converting them to type references
    if isinstance(schema, Reference30) or isinstance(schema, Reference31):
        import_type = common.normalize_symbol(schema.ref.split("/")[-1])
        # Nullable-wrapper collapse: ref to X may be overridden to Optional[Y]
        override = _REFERENCE_TYPE_OVERRIDES.get(import_type)
        if override is not None:
            return TypeConversion(
                original_type=schema.ref,
                converted_type=override.converted_type,
                import_types=override.import_types,
            )

        if required:
            converted_type = import_type
        else:
            converted_type = f"Optional[{import_type}]"

        # Don't emit a self-import when the reference points back to the current model.
        return TypeConversion(
            original_type=schema.ref,
            converted_type=converted_type,
            import_types=([f"from .{import_type} import {import_type}"] if import_type != model_name else None),
        )

    # Optional-ness is expressed by wrapping the converted type in Optional[...].
    if required:
        pre_type = ""
        post_type = ""
    else:
        pre_type = "Optional["
        post_type = "]"

    # schema.type may be a DataType enum (has .value), a plain string, or None.
    original_type = (
        schema.type.value
        if hasattr(schema.type, "value") and schema.type is not None
        else str(schema.type)
        if schema.type is not None
        else "object"
    )
    import_types: Optional[List[str]] = None

    if schema.allOf is not None:
        # allOf: intersect/compose — rendered as a Tuple of the member types.
        conversions = []
        for sub_schema in schema.allOf:
            if isinstance(sub_schema, Schema30) or isinstance(sub_schema, Schema31):
                conversions.append(type_converter(sub_schema, True))
            else:
                import_type = common.normalize_symbol(sub_schema.ref.split("/")[-1])
                if import_type == model_name and model_name is not None:
                    # Self-reference: quote the name (forward reference), no import.
                    conversions.append(
                        TypeConversion(
                            original_type=sub_schema.ref,
                            converted_type='"' + model_name + '"',
                            import_types=None,
                        )
                    )
                else:
                    import_types = [f"from .{import_type} import {import_type}"]
                    conversions.append(
                        TypeConversion(
                            original_type=sub_schema.ref,
                            converted_type=import_type,
                            import_types=import_types,
                        )
                    )

        original_type = "tuple<" + ",".join([i.original_type for i in conversions]) + ">"
        if len(conversions) == 1:
            converted_type = conversions[0].converted_type
        else:
            converted_type = "Tuple[" + ",".join([i.converted_type for i in conversions]) + "]"

        converted_type = pre_type + converted_type + post_type
        # Collect first import from referenced sub-schemas only (skip empty lists)
        import_types = [
            i.import_types[0] for i in conversions if i.import_types is not None and len(i.import_types) > 0
        ] or None

    elif schema.oneOf is not None or schema.anyOf is not None:
        # oneOf/anyOf: rendered as a Union of the member types (oneOf wins when both present).
        used = schema.oneOf if schema.oneOf is not None else schema.anyOf
        used = used if used is not None else []
        conversions = []
        for sub_schema in used:
            if isinstance(sub_schema, Schema30) or isinstance(sub_schema, Schema31):
                conversions.append(type_converter(sub_schema, True))
            else:
                import_type = common.normalize_symbol(sub_schema.ref.split("/")[-1])
                import_types = [f"from .{import_type} import {import_type}"]
                conversions.append(
                    TypeConversion(
                        original_type=sub_schema.ref,
                        converted_type=import_type,
                        import_types=import_types,
                    )
                )
        original_type = "union<" + ",".join([i.original_type for i in conversions]) + ">"

        if len(conversions) == 1:
            converted_type = conversions[0].converted_type
        else:
            converted_type = "Union[" + ",".join([i.converted_type for i in conversions]) + "]"

        converted_type = pre_type + converted_type + post_type
        import_types = list(itertools.chain(*[i.import_types for i in conversions if i.import_types is not None]))
    # We only want to auto convert to datetime if orjson is used throughout the code, otherwise we can not
    # serialize it to JSON.
    elif (schema.type == "string" or str(schema.type) == "DataType.STRING") and (
        schema.schema_format is None or not common.get_use_orjson()
    ):
        converted_type = pre_type + "str" + post_type
    elif (
        (schema.type == "string" or str(schema.type) == "DataType.STRING")
        and schema.schema_format is not None
        and schema.schema_format.startswith("uuid")
        and common.get_use_orjson()
    ):
        # "uuid1".."uuid5" map to pydantic's UUID1..UUID5; bare "uuid" maps to stdlib UUID.
        if len(schema.schema_format) > 4 and schema.schema_format[4].isnumeric():
            uuid_type = schema.schema_format.upper()
            converted_type = pre_type + uuid_type + post_type
            import_types = ["from pydantic import " + uuid_type]
        else:
            converted_type = pre_type + "UUID" + post_type
            import_types = ["from uuid import UUID"]
    elif (schema.type == "string" or str(schema.type) == "DataType.STRING") and schema.schema_format == "date-time":
        converted_type = pre_type + "datetime" + post_type
        import_types = ["from datetime import datetime"]
    elif schema.type == "integer" or str(schema.type) == "DataType.INTEGER":
        converted_type = pre_type + "int" + post_type
    elif schema.type == "number" or str(schema.type) == "DataType.NUMBER":
        converted_type = pre_type + "float" + post_type
    elif schema.type == "boolean" or str(schema.type) == "DataType.BOOLEAN":
        converted_type = pre_type + "bool" + post_type
    elif schema.type == "array" or str(schema.type) == "DataType.ARRAY":
        # Arrays recurse into items; item type may be a $ref, a schema, or absent (-> Any).
        retVal = pre_type + "List["
        if isinstance(schema.items, Reference30) or isinstance(schema.items, Reference31):
            converted_reference = _generate_property_from_reference(
                model_name or "", "", schema.items, schema, required
            )
            import_types = converted_reference.type.import_types
            original_type = "array<" + converted_reference.type.original_type + ">"
            retVal += converted_reference.type.converted_type
        elif isinstance(schema.items, Schema30) or isinstance(schema.items, Schema31):
            type_str = schema.items.type
            if hasattr(type_str, "value"):
                type_value = str(type_str.value) if type_str is not None else "unknown"
            else:
                type_value = str(type_str) if type_str is not None else "unknown"
            original_type = "array<" + type_value + ">"
            retVal += type_converter(schema.items, True).converted_type
        else:
            original_type = "array<unknown>"
            retVal += "Any"

        converted_type = retVal + "]" + post_type
    elif schema.type == "object" or str(schema.type) == "DataType.OBJECT":
        converted_type = pre_type + "Dict[str, Any]" + post_type
    elif schema.type == "null" or str(schema.type) == "DataType.NULL":
        converted_type = pre_type + "None" + post_type
    elif schema.type is None:
        converted_type = pre_type + "Any" + post_type
    else:
        # Handle DataType enum types as strings
        if hasattr(schema.type, "value"):
            # Single DataType enum
            if schema.type.value == "string":
                # Check for UUID format first
                if (
                    schema.schema_format is not None
                    and schema.schema_format.startswith("uuid")
                    and common.get_use_orjson()
                ):
                    if len(schema.schema_format) > 4 and schema.schema_format[4].isnumeric():
                        uuid_type = schema.schema_format.upper()
                        converted_type = pre_type + uuid_type + post_type
                        import_types = ["from pydantic import " + uuid_type]
                    else:
                        converted_type = pre_type + "UUID" + post_type
                        import_types = ["from uuid import UUID"]
                # Check for date-time format
                elif schema.schema_format == "date-time":
                    converted_type = pre_type + "datetime" + post_type
                    import_types = ["from datetime import datetime"]
                else:
                    converted_type = pre_type + "str" + post_type
            elif schema.type.value == "integer":
                converted_type = pre_type + "int" + post_type
            elif schema.type.value == "number":
                converted_type = pre_type + "float" + post_type
            elif schema.type.value == "boolean":
                converted_type = pre_type + "bool" + post_type
            elif schema.type.value == "array":
                converted_type = pre_type + "List[Any]" + post_type
            elif schema.type.value == "object":
                converted_type = pre_type + "Dict[str, Any]" + post_type
            elif schema.type.value == "null":
                converted_type = pre_type + "None" + post_type
            else:
                converted_type = pre_type + "str" + post_type  # Default fallback
        elif isinstance(schema.type, list) and len(schema.type) > 0:
            # List of DataType enums - use first one
            first_type = schema.type[0]
            if hasattr(first_type, "value"):
                if first_type.value == "string":
                    # Check for UUID format first
                    if (
                        schema.schema_format is not None
                        and schema.schema_format.startswith("uuid")
                        and common.get_use_orjson()
                    ):
                        if len(schema.schema_format) > 4 and schema.schema_format[4].isnumeric():
                            uuid_type = schema.schema_format.upper()
                            converted_type = pre_type + uuid_type + post_type
                            import_types = ["from pydantic import " + uuid_type]
                        else:
                            converted_type = pre_type + "UUID" + post_type
                            import_types = ["from uuid import UUID"]
                    # Check for date-time format
                    elif schema.schema_format == "date-time":
                        converted_type = pre_type + "datetime" + post_type
                        import_types = ["from datetime import datetime"]
                    else:
                        converted_type = pre_type + "str" + post_type
                elif first_type.value == "integer":
                    converted_type = pre_type + "int" + post_type
                elif first_type.value == "number":
                    converted_type = pre_type + "float" + post_type
                elif first_type.value == "boolean":
                    converted_type = pre_type + "bool" + post_type
                elif first_type.value == "array":
                    converted_type = pre_type + "List[Any]" + post_type
                elif first_type.value == "object":
                    converted_type = pre_type + "Dict[str, Any]" + post_type
                elif first_type.value == "null":
                    converted_type = pre_type + "None" + post_type
                else:
                    converted_type = pre_type + "str" + post_type  # Default fallback
            else:
                converted_type = pre_type + "str" + post_type  # Default fallback
        else:
            converted_type = pre_type + "str" + post_type  # Default fallback

    return TypeConversion(
        original_type=original_type,
        converted_type=converted_type,
        import_types=import_types,
    )
|
|
589
|
+
|
|
590
|
+
|
|
591
|
+
def _generate_property_from_schema(
    model_name: str, name: str, schema: Schema, parent_schema: Optional[Schema] = None
) -> Property:
    """
    Generates a property from a schema. It takes the type of the schema and converts it to a python type, and then
    creates the according property.
    :param model_name: Name of the model this property belongs to
    :param name: Name of the schema
    :param schema: schema to be converted
    :param parent_schema: Component this belongs to
    :return: Property
    """
    # Required iff the parent schema lists this property name in its `required` array.
    required = parent_schema is not None and parent_schema.required is not None and name in parent_schema.required

    import_type = None
    if required:
        # Avoid importing the model from itself when the property name equals the model name.
        import_type = [] if name == model_name else [name]

    return Property(
        name=name,
        type=type_converter(schema, required, model_name),
        required=required,
        # Optional properties default to the literal string "None" for template rendering.
        default=None if required else "None",
        import_type=import_type,
    )
|
|
616
|
+
|
|
617
|
+
|
|
618
|
+
def _generate_property_from_reference(
    model_name: str,
    name: str,
    reference: Reference,
    parent_schema: Optional[Schema] = None,
    force_required: bool = False,
) -> Property:
    """
    Generates a property from a reference. It takes the name of the reference as the type, and then
    returns a property type
    :param name: Name of the schema
    :param reference: reference to be converted
    :param parent_schema: Component this belongs to
    :param force_required: Force the property to be required
    :return: Property and model to be imported by the file
    """
    required = (
        parent_schema is not None and parent_schema.required is not None and name in parent_schema.required
    ) or force_required
    import_model = common.normalize_symbol(reference.ref.split("/")[-1])

    if import_model == model_name:
        # Self-reference: no import, and the optional form is quoted as a forward reference.
        # NOTE(review): the required form is left unquoted here — presumably the generated
        # model defers annotation evaluation; confirm against the model templates.
        type_conv = TypeConversion(
            original_type=reference.ref,
            converted_type=(import_model if required else 'Optional["' + import_model + '"]'),
            import_types=None,
        )
    else:
        type_conv = TypeConversion(
            original_type=reference.ref,
            converted_type=(import_model if required else "Optional[" + import_model + "]"),
            import_types=[f"from .{import_model} import {import_model}"],
        )
    return Property(
        name=name,
        type=type_conv,
        required=required,
        # Optional properties default to the literal string "None" for template rendering.
        default=None if required else "None",
        import_type=[import_model],
    )
|
|
658
|
+
|
|
659
|
+
|
|
660
|
+
def generate_models(components: Components, pydantic_version: PydanticVersion = PydanticVersion.V2) -> List[Model]:
    """
    Receives components from an OpenAPI 3.0+ specification and generates the models from it.
    Additionally:
      - Detects unions / discriminated unions in property schemas (oneOf/anyOf)
      - Emits a named alias module (e.g. TokenIssuer.py)
      - Rewrites the property type to use that alias (instead of Union[...])

    :param components: Parsed ``components`` section of the OpenAPI document.
    :param pydantic_version: Which pydantic major version the rendered models target
        (selects the Jinja template used for normal models).
    :return: List of Model objects (normal models, union alias modules, then
        discriminator enum modules appended last).
    """
    models: List[Model] = []

    # No schemas means nothing to generate.
    if components.schemas is None:
        return models

    jinja_env = create_jinja_env()
    # Build nullable-wrapper overrides so refs to simple wrappers (X = anyOf[$ref Y, null])
    # are collapsed to Optional[Y] and we avoid generating X.py (which can collide on Windows).
    # NOTE(review): mutating this module-level global makes generate_models non-reentrant;
    # presumably the generator runs single-threaded — confirm.
    global _REFERENCE_TYPE_OVERRIDES
    _REFERENCE_TYPE_OVERRIDES = _build_nullable_wrapper_overrides(components)

    # Maps model name -> discriminator binding, plus the enum members needed per enum module.
    discriminator_bindings, enum_members_by_name = _discover_discriminated_unions(components)

    # Track alias modules so we only create each once
    alias_models_by_name: Dict[str, Model] = {}

    for schema_name, schema_or_reference in components.schemas.items():
        name = common.normalize_symbol(schema_name)

        # Don't generate standalone modules for nullable wrapper components
        if name in _REFERENCE_TYPE_OVERRIDES:
            continue

        # --------------------------
        # Enums
        # --------------------------
        if schema_or_reference.enum is not None:
            value_dict = schema_or_reference.model_dump()
            # Each enum value becomes a (MEMBER_NAME, value) pair for the template.
            value_dict["enum"] = [(common.normalize_symbol(str(i)).upper(), i) for i in value_dict["enum"]]
            m = Model(
                file_name=name,
                content=jinja_env.get_template(ENUM_TEMPLATE).render(name=name, **value_dict),
                openapi_object=schema_or_reference,
                properties=[],
            )
            # Sanity-check that the rendered module is valid Python before emitting it.
            try:
                compile(m.content, "<string>", "exec")
                models.append(m)
            except SyntaxError as e:  # pragma: no cover
                click.echo(f"Error in model {name}: {e}")

            # Enum schemas never fall through to normal-model generation.
            continue  # pragma: no cover

        # --------------------------
        # Normal models
        # --------------------------
        properties: List[Property] = []
        property_iterator = schema_or_reference.properties.items() if schema_or_reference.properties is not None else {}

        for prop_name, prop_schema in property_iterator:
            # Reference property
            if isinstance(prop_schema, Reference30) or isinstance(prop_schema, Reference31):
                conv_property = _generate_property_from_reference(name, prop_name, prop_schema, schema_or_reference)
                properties.append(conv_property)
                continue

            # Schema property
            conv_property = _generate_property_from_schema(name, prop_name, prop_schema, schema_or_reference)

            # If this model is a discriminated union member, and this property
            # is the discriminator key, make it a Literal[...] with a default
            binding = discriminator_bindings.get(name)
            if binding and common.normalize_symbol(conv_property.name) == common.normalize_symbol(
                binding.discriminator_key
            ):
                conv_property.required = True
                conv_property.default = f"{binding.enum_name}.{binding.enum_member}"

                extra_imports = [
                    "from typing import Literal",
                    f"from .{binding.enum_name} import {binding.enum_name}",
                ]

                # Pin the property type to this member's enum value so pydantic can
                # discriminate on it.
                conv_property.type = TypeConversion(
                    original_type=conv_property.type.original_type,
                    converted_type=f"Literal[{binding.enum_name}.{binding.enum_member}]",
                    import_types=extra_imports,
                )

            # -----------------------------------------
            # NEW: union / discriminated union factoring
            # -----------------------------------------
            if isinstance(prop_schema, (Schema30, Schema31)) and _schema_is_union(prop_schema):
                alias_name = _alias_name_for_property(prop_name)
                discriminator_key = _get_discriminator_key(prop_schema)

                # Only generate standalone alias modules for DISCRIMINATED unions.
                # Plain unions (including nullable wrappers) are left inline.
                if discriminator_key is not None:
                    # Build the union type and gather imports from members.
                    # Important: we want a NON-optional union for the alias definition.
                    union_conv = type_converter(prop_schema, required=True, model_name=name)
                    union_type_str = union_conv.converted_type  # e.g. Union[A,B,C]
                    member_imports = union_conv.import_types or []

                    # Create alias module once
                    if alias_name not in alias_models_by_name:
                        alias_content = _render_union_alias_module(
                            jinja_env=jinja_env,
                            alias_name=alias_name,
                            union_type=union_type_str,
                            discriminator_key=discriminator_key,
                            member_imports=member_imports,
                        )

                        # Validate alias module compiles
                        try:
                            compile(alias_content, "<string>", "exec")
                        except SyntaxError as e:  # pragma: no cover
                            click.echo(f"Error in union alias {alias_name}: {e}")  # pragma: no cover

                        alias_models_by_name[alias_name] = Model(
                            file_name=alias_name,
                            content=alias_content,
                            openapi_object=prop_schema,
                            properties=[],
                        )

                    # Rewrite property type to use alias
                    rewritten_type = alias_name if conv_property.required else f"Optional[{alias_name}]"
                    conv_property.type = TypeConversion(
                        original_type=conv_property.type.original_type,
                        converted_type=rewritten_type,
                        import_types=[f"from .{alias_name} import {alias_name}"],
                    )

            properties.append(conv_property)

        template_name = MODELS_TEMPLATE_PYDANTIC_V2 if pydantic_version == PydanticVersion.V2 else MODELS_TEMPLATE

        generated_content = jinja_env.get_template(template_name).render(
            schema_name=name, schema=schema_or_reference, properties=properties
        )

        # Rendered model is appended even if it fails to compile; the error is only echoed.
        try:
            compile(generated_content, "<string>", "exec")
        except SyntaxError as e:  # pragma: no cover
            click.echo(f"Error in model {name}: {e}")  # pragma: no cover

        models.append(
            Model(
                file_name=name,
                content=generated_content,
                openapi_object=schema_or_reference,
                properties=properties,
            )
        )

    # Ensure enum modules for discriminators are included
    enum_models: List[Model] = []
    for enum_name, members in enum_members_by_name.items():
        enum_content = jinja_env.get_template(DISCRIMINATOR_ENUM_TEMPLATE).render(enum_name=enum_name, members=members)
        try:
            compile(enum_content, "<string>", "exec")
        except SyntaxError as e:  # pragma: no cover
            click.echo(f"Error in enum {enum_name}: {e}")  # pragma: no cover

        # Model.openapi_object is required (non-Optional). Enum modules don't map to a real schema,
        # so attach a tiny placeholder schema to satisfy validation.
        placeholder_schema = Schema31() if isinstance(components, Components31) else Schema30()

        enum_models.append(
            Model(file_name=enum_name, content=enum_content, openapi_object=placeholder_schema, properties=[])
        )

    # Ensure alias modules are included in output
    models.extend(alias_models_by_name.values())
    # Append enum modules last
    models.extend(enum_models)

    return models
|