datamodel-code-generator 0.28.4__py3-none-any.whl → 0.28.5__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of datamodel-code-generator might be problematic.
- datamodel_code_generator/__init__.py +2 -2
- datamodel_code_generator/__main__.py +1 -1
- datamodel_code_generator/model/base.py +18 -6
- datamodel_code_generator/model/dataclass.py +4 -0
- datamodel_code_generator/model/enum.py +3 -1
- datamodel_code_generator/model/msgspec.py +4 -0
- datamodel_code_generator/model/pydantic/base_model.py +4 -0
- datamodel_code_generator/model/pydantic/types.py +2 -0
- datamodel_code_generator/model/pydantic_v2/base_model.py +2 -0
- datamodel_code_generator/model/scalar.py +2 -0
- datamodel_code_generator/model/typed_dict.py +2 -0
- datamodel_code_generator/model/types.py +2 -0
- datamodel_code_generator/model/union.py +2 -0
- datamodel_code_generator/parser/base.py +31 -8
- datamodel_code_generator/parser/graphql.py +3 -2
- datamodel_code_generator/parser/jsonschema.py +18 -5
- datamodel_code_generator/parser/openapi.py +3 -2
- datamodel_code_generator/reference.py +4 -2
- datamodel_code_generator/types.py +58 -11
- {datamodel_code_generator-0.28.4.dist-info → datamodel_code_generator-0.28.5.dist-info}/METADATA +1 -1
- {datamodel_code_generator-0.28.4.dist-info → datamodel_code_generator-0.28.5.dist-info}/RECORD +24 -24
- {datamodel_code_generator-0.28.4.dist-info → datamodel_code_generator-0.28.5.dist-info}/WHEEL +0 -0
- {datamodel_code_generator-0.28.4.dist-info → datamodel_code_generator-0.28.5.dist-info}/entry_points.txt +0 -0
- {datamodel_code_generator-0.28.4.dist-info → datamodel_code_generator-0.28.5.dist-info}/licenses/LICENSE +0 -0
datamodel_code_generator/__init__.py
@@ -280,7 +280,7 @@ def generate(  # noqa: PLR0912, PLR0913, PLR0914, PLR0915
  custom_formatters_kwargs: dict[str, Any] | None = None,
  use_pendulum: bool = False,
  http_query_parameters: Sequence[tuple[str, str]] | None = None,
-
+ treat_dot_as_module: bool = False,
  use_exact_imports: bool = False,
  union_mode: UnionMode | None = None,
  output_datetime_class: DatetimeClassType | None = None,
@@ -475,7 +475,7 @@ def generate(  # noqa: PLR0912, PLR0913, PLR0914, PLR0915
  custom_formatters_kwargs=custom_formatters_kwargs,
  use_pendulum=use_pendulum,
  http_query_parameters=http_query_parameters,
-
+ treat_dot_as_module=treat_dot_as_module,
  use_exact_imports=use_exact_imports,
  default_field_extras=default_field_extras,
  target_datetime_class=output_datetime_class,
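With this release treat_dot_as_module is forwarded from generate() down to the parser, so programmatic callers can opt in as well as CLI users. A minimal sketch of that call, assuming the 0.28.5 wheel is installed; the schema, file name and output path below are invented purely for illustration:

    from pathlib import Path

    from datamodel_code_generator import DataModelType, InputFileType, generate

    # Made-up JSON Schema kept under a dotted file name.
    json_schema = '{"type": "object", "properties": {"name": {"type": "string"}}}'

    generate(
        json_schema,
        input_file_type=InputFileType.JsonSchema,
        input_filename="person.v1.json",
        output=Path("models"),
        output_model_type=DataModelType.PydanticV2BaseModel,
        treat_dot_as_module=True,  # new keyword in the hunks above; forwarded to the parser
    )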
datamodel_code_generator/__main__.py
@@ -516,7 +516,7 @@ def main(args: Sequence[str] | None = None) -> Exit:  # noqa: PLR0911, PLR0912,
  custom_formatters_kwargs=custom_formatters_kwargs,
  use_pendulum=config.use_pendulum,
  http_query_parameters=config.http_query_parameters,
-
+ treat_dot_as_module=config.treat_dot_as_module,
  use_exact_imports=config.use_exact_imports,
  union_mode=config.union_mode,
  output_datetime_class=config.output_datetime_class,
datamodel_code_generator/model/base.py
@@ -1,5 +1,6 @@
  from __future__ import annotations

+ import re
  from abc import ABC, abstractmethod
  from collections import defaultdict
  from copy import deepcopy
@@ -203,18 +204,27 @@ def get_template(template_file_path: Path) -> Template:
  return environment.get_template(template_file_path.name)


- def get_module_path(name: str, file_path: Path | None) -> list[str]:
+ def sanitize_module_name(name: str, *, treat_dot_as_module: bool) -> str:
+ pattern = r"[^0-9a-zA-Z_.]" if treat_dot_as_module else r"[^0-9a-zA-Z_]"
+ sanitized = re.sub(pattern, "_", name)
+ if sanitized and sanitized[0].isdigit():
+ sanitized = f"_{sanitized}"
+ return sanitized
+
+
+ def get_module_path(name: str, file_path: Path | None, *, treat_dot_as_module: bool) -> list[str]:
  if file_path:
+ sanitized_stem = sanitize_module_name(file_path.stem, treat_dot_as_module=treat_dot_as_module)
  return [
  *file_path.parts[:-1],
- file_path.stem,
+ sanitized_stem,
  *name.split(".")[:-1],
  ]
  return name.split(".")[:-1]


- def get_module_name(name: str, file_path: Path | None) -> str:
- return ".".join(get_module_path(name, file_path))
+ def get_module_name(name: str, file_path: Path | None, *, treat_dot_as_module: bool) -> str:
+ return ".".join(get_module_path(name, file_path, treat_dot_as_module=treat_dot_as_module))


  class TemplateBase(ABC):
@@ -265,6 +275,7 @@ class DataModel(TemplateBase, Nullable, ABC):
  default: Any = UNDEFINED,
  nullable: bool = False,
  keyword_only: bool = False,
+ treat_dot_as_module: bool = False,
  ) -> None:
  self.keyword_only = keyword_only
  if not self.TEMPLATE_FILE_PATH:
@@ -313,6 +324,7 @@ class DataModel(TemplateBase, Nullable, ABC):
  self._additional_imports.extend(self.DEFAULT_IMPORTS)
  self.default: Any = default
  self._nullable: bool = nullable
+ self._treat_dot_as_module: bool = treat_dot_as_module

  def _validate_fields(self, fields: list[DataModelFieldBase]) -> list[DataModelFieldBase]:
  names: set[str] = set()
@@ -393,11 +405,11 @@ class DataModel(TemplateBase, Nullable, ABC):

  @property
  def module_path(self) -> list[str]:
- return get_module_path(self.name, self.file_path)
+ return get_module_path(self.name, self.file_path, treat_dot_as_module=self._treat_dot_as_module)

  @property
  def module_name(self) -> str:
- return get_module_name(self.name, self.file_path)
+ return get_module_name(self.name, self.file_path, treat_dot_as_module=self._treat_dot_as_module)

  @property
  def all_data_types(self) -> Iterator[DataType]:
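Taken on their own, the new helpers behave as follows for a couple of invented file names (results derived directly from the code in the hunk above, assuming the 0.28.5 wheel):

    from pathlib import Path

    from datamodel_code_generator.model.base import get_module_path, sanitize_module_name

    # Characters that are not valid in an identifier become "_",
    # and a leading digit gets a "_" prefix.
    sanitize_module_name("2024-orders.v1", treat_dot_as_module=False)  # -> "_2024_orders_v1"
    sanitize_module_name("2024-orders.v1", treat_dot_as_module=True)   # -> "_2024_orders.v1"

    # The sanitized stem now feeds a model's module path.
    get_module_path("Order", Path("schemas/2024-orders.v1.json"), treat_dot_as_module=True)
    # -> ["schemas", "_2024_orders.v1"]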
datamodel_code_generator/model/dataclass.py
@@ -53,6 +53,7 @@ class DataClass(DataModel):
  default: Any = UNDEFINED,
  nullable: bool = False,
  keyword_only: bool = False,
+ treat_dot_as_module: bool = False,
  ) -> None:
  super().__init__(
  reference=reference,
@@ -68,6 +69,7 @@ class DataClass(DataModel):
  default=default,
  nullable=nullable,
  keyword_only=keyword_only,
+ treat_dot_as_module=treat_dot_as_module,
  )


@@ -144,6 +146,7 @@ class DataTypeManager(_DataTypeManager):
  use_union_operator: bool = False,  # noqa: FBT001, FBT002
  use_pendulum: bool = False,  # noqa: FBT001, FBT002
  target_datetime_class: DatetimeClassType = DatetimeClassType.Datetime,
+ treat_dot_as_module: bool = False,  # noqa: FBT001, FBT002
  ) -> None:
  super().__init__(
  python_version,
@@ -154,6 +157,7 @@ class DataTypeManager(_DataTypeManager):
  use_union_operator,
  use_pendulum,
  target_datetime_class,
+ treat_dot_as_module,
  )

  datetime_map = (
datamodel_code_generator/model/enum.py
@@ -52,6 +52,7 @@ class Enum(DataModel):
  default: Any = UNDEFINED,
  nullable: bool = False,
  keyword_only: bool = False,
+ treat_dot_as_module: bool = False,
  ) -> None:
  super().__init__(
  reference=reference,
@@ -67,6 +68,7 @@ class Enum(DataModel):
  default=default,
  nullable=nullable,
  keyword_only=keyword_only,
+ treat_dot_as_module=treat_dot_as_module,
  )

  if not base_classes and type_:
@@ -91,7 +93,7 @@ class Enum(DataModel):

  for field in self.fields:
  # Remove surrounding quotes from field default value
- field_default = (field.default or "").strip("'\"")
+ field_default = str(field.default or "").strip("'\"")

  # Compare values after removing quotes
  if field_default == str_value:
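The only behavioural change in enum.py is the str() wrapper: a default that is not a string (an integer enum member, for example) no longer breaks the quote-stripping comparison. A trivial sketch of the difference, with made-up default values:

    # 0.28.4 assumed field.default was a string, so (1 or "").strip("'\"")
    # raised AttributeError; coercing with str() first makes all cases safe.
    for default in ("'cat'", 1, None):
        field_default = str(default or "").strip("'\"")
        print(repr(field_default))  # -> 'cat', '1', ''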
datamodel_code_generator/model/msgspec.py
@@ -97,6 +97,7 @@ class Struct(DataModel):
  default: Any = UNDEFINED,
  nullable: bool = False,
  keyword_only: bool = False,
+ treat_dot_as_module: bool = False,
  ) -> None:
  super().__init__(
  reference=reference,
@@ -112,6 +113,7 @@ class Struct(DataModel):
  default=default,
  nullable=nullable,
  keyword_only=keyword_only,
+ treat_dot_as_module=treat_dot_as_module,
  )
  self.extra_template_data.setdefault("base_class_kwargs", {})
  if self.keyword_only:
@@ -287,6 +289,7 @@ class DataTypeManager(_DataTypeManager):
  use_union_operator: bool = False,  # noqa: FBT001, FBT002
  use_pendulum: bool = False,  # noqa: FBT001, FBT002
  target_datetime_class: DatetimeClassType = DatetimeClassType.Datetime,
+ treat_dot_as_module: bool = False,  # noqa: FBT001, FBT002
  ) -> None:
  super().__init__(
  python_version,
@@ -297,6 +300,7 @@ class DataTypeManager(_DataTypeManager):
  use_union_operator,
  use_pendulum,
  target_datetime_class,
+ treat_dot_as_module,
  )

  datetime_map = (
datamodel_code_generator/model/pydantic/base_model.py
@@ -213,6 +213,7 @@ class BaseModelBase(DataModel, ABC):
  default: Any = UNDEFINED,
  nullable: bool = False,
  keyword_only: bool = False,
+ treat_dot_as_module: bool = False,
  ) -> None:
  methods: list[str] = [field.method for field in fields if field.method]

@@ -230,6 +231,7 @@ class BaseModelBase(DataModel, ABC):
  default=default,
  nullable=nullable,
  keyword_only=keyword_only,
+ treat_dot_as_module=treat_dot_as_module,
  )

  @cached_property
@@ -263,6 +265,7 @@ class BaseModel(BaseModelBase):
  default: Any = UNDEFINED,
  nullable: bool = False,
  keyword_only: bool = False,
+ treat_dot_as_module: bool = False,
  ) -> None:
  super().__init__(
  reference=reference,
@@ -277,6 +280,7 @@ class BaseModel(BaseModelBase):
  default=default,
  nullable=nullable,
  keyword_only=keyword_only,
+ treat_dot_as_module=treat_dot_as_module,
  )
  config_parameters: dict[str, Any] = {}

datamodel_code_generator/model/pydantic/types.py
@@ -166,6 +166,7 @@ class DataTypeManager(_DataTypeManager):
  use_union_operator: bool = False,  # noqa: FBT001, FBT002
  use_pendulum: bool = False,  # noqa: FBT001, FBT002
  target_datetime_class: DatetimeClassType | None = None,
+ treat_dot_as_module: bool = False,  # noqa: FBT001, FBT002
  ) -> None:
  super().__init__(
  python_version,
@@ -176,6 +177,7 @@ class DataTypeManager(_DataTypeManager):
  use_union_operator,
  use_pendulum,
  target_datetime_class,
+ treat_dot_as_module,
  )

  self.type_map: dict[Types, DataType] = self.type_map_factory(
datamodel_code_generator/model/pydantic_v2/base_model.py
@@ -179,6 +179,7 @@ class BaseModel(BaseModelBase):
  default: Any = UNDEFINED,
  nullable: bool = False,
  keyword_only: bool = False,
+ treat_dot_as_module: bool = False,
  ) -> None:
  super().__init__(
  reference=reference,
@@ -193,6 +194,7 @@ class BaseModel(BaseModelBase):
  default=default,
  nullable=nullable,
  keyword_only=keyword_only,
+ treat_dot_as_module=treat_dot_as_module,
  )
  config_parameters: dict[str, Any] = {}

datamodel_code_generator/model/scalar.py
@@ -50,6 +50,7 @@ class DataTypeScalar(DataModel):
  default: Any = UNDEFINED,
  nullable: bool = False,
  keyword_only: bool = False,
+ treat_dot_as_module: bool = False,
  ) -> None:
  extra_template_data = extra_template_data or defaultdict(dict)

@@ -78,4 +79,5 @@ class DataTypeScalar(DataModel):
  default=default,
  nullable=nullable,
  keyword_only=keyword_only,
+ treat_dot_as_module=treat_dot_as_module,
  )
datamodel_code_generator/model/typed_dict.py
@@ -60,6 +60,7 @@ class TypedDict(DataModel):
  default: Any = UNDEFINED,
  nullable: bool = False,
  keyword_only: bool = False,
+ treat_dot_as_module: bool = False,
  ) -> None:
  super().__init__(
  reference=reference,
@@ -75,6 +76,7 @@ class TypedDict(DataModel):
  default=default,
  nullable=nullable,
  keyword_only=keyword_only,
+ treat_dot_as_module=treat_dot_as_module,
  )

  @property
datamodel_code_generator/model/types.py
@@ -68,6 +68,7 @@ class DataTypeManager(_DataTypeManager):
  use_union_operator: bool = False,  # noqa: FBT001, FBT002
  use_pendulum: bool = False,  # noqa: FBT001, FBT002
  target_datetime_class: DatetimeClassType = DatetimeClassType.Datetime,
+ treat_dot_as_module: bool = False,  # noqa: FBT001, FBT002
  ) -> None:
  super().__init__(
  python_version,
@@ -78,6 +79,7 @@ class DataTypeManager(_DataTypeManager):
  use_union_operator,
  use_pendulum,
  target_datetime_class,
+ treat_dot_as_module,
  )

  self.type_map: dict[Types, DataType] = type_map_factory(self.data_type)
datamodel_code_generator/model/union.py
@@ -37,6 +37,7 @@ class DataTypeUnion(DataModel):
  default: Any = UNDEFINED,
  nullable: bool = False,
  keyword_only: bool = False,
+ treat_dot_as_module: bool = False,
  ) -> None:
  super().__init__(
  reference=reference,
@@ -52,4 +53,5 @@ class DataTypeUnion(DataModel):
  default=default,
  nullable=nullable,
  keyword_only=keyword_only,
+ treat_dot_as_module=treat_dot_as_module,
  )
datamodel_code_generator/parser/base.py
@@ -7,7 +7,7 @@ from abc import ABC, abstractmethod
  from collections import OrderedDict, defaultdict
  from itertools import groupby
  from pathlib import Path
- from typing import TYPE_CHECKING, Any, Callable, NamedTuple, Optional, Protocol, TypeVar, runtime_checkable
+ from typing import TYPE_CHECKING, Any, Callable, NamedTuple, Optional, Protocol, TypeVar, cast, runtime_checkable
  from urllib.parse import ParseResult

  from pydantic import BaseModel
@@ -370,7 +370,7 @@ class Parser(ABC):
  custom_formatters_kwargs: dict[str, Any] | None = None,
  use_pendulum: bool = False,
  http_query_parameters: Sequence[tuple[str, str]] | None = None,
-
+ treat_dot_as_module: bool = False,
  use_exact_imports: bool = False,
  default_field_extras: dict[str, Any] | None = None,
  target_datetime_class: DatetimeClassType | None = DatetimeClassType.Datetime,
@@ -387,6 +387,7 @@ class Parser(ABC):
  use_union_operator=use_union_operator,
  use_pendulum=use_pendulum,
  target_datetime_class=target_datetime_class,
+ treat_dot_as_module=treat_dot_as_module,
  )
  self.data_model_type: type[DataModel] = data_model_type
  self.data_model_root_type: type[DataModel] = data_model_root_type
@@ -482,7 +483,7 @@ class Parser(ABC):
  self.known_third_party = known_third_party
  self.custom_formatter = custom_formatters
  self.custom_formatters_kwargs = custom_formatters_kwargs
- self.
+ self.treat_dot_as_module = treat_dot_as_module
  self.default_field_extras: dict[str, Any] | None = default_field_extras
  self.formatters: list[Formatter] = formatters

@@ -666,7 +667,7 @@ class Parser(ABC):
  if (
  len(model.module_path) > 1
  and model.module_path[-1].count(".") > 0
- and not self.
+ and not self.treat_dot_as_module
  ):
  rel_path_depth = model.module_path[-1].count(".")
  from_ = from_[rel_path_depth:]
@@ -727,6 +728,8 @@ class Parser(ABC):
  property_name = discriminator.get("propertyName")
  if not property_name:  # pragma: no cover
  continue
+ field_name, alias = self.model_resolver.get_valid_field_name_and_alias(field_name=property_name)
+ discriminator["propertyName"] = field_name
  mapping = discriminator.get("mapping", {})
  for data_type in field.data_type.data_types:
  if not data_type.reference:  # pragma: no cover
@@ -778,13 +781,13 @@ class Parser(ABC):
  raise RuntimeError(msg)
  has_one_literal = False
  for discriminator_field in discriminator_model.fields:
- if
+ if field_name not in {discriminator_field.original_name, discriminator_field.name}:
  continue
  literals = discriminator_field.data_type.literals
  if len(literals) == 1 and literals[0] == (type_names[0] if type_names else None):
  has_one_literal = True
  if isinstance(discriminator_model, msgspec_model.Struct):  # pragma: no cover
- discriminator_model.add_base_class_kwarg("tag_field", f"'{
+ discriminator_model.add_base_class_kwarg("tag_field", f"'{field_name}'")
  discriminator_model.add_base_class_kwarg("tag", discriminator_field.represented_default)
  discriminator_field.extras["is_classvar"] = True
  # Found the discriminator field, no need to keep looking
@@ -800,9 +803,10 @@ class Parser(ABC):
  if not has_one_literal:
  discriminator_model.fields.append(
  self.data_model_field_type(
- name=
+ name=field_name,
  data_type=self.data_type(literals=type_names),
  required=True,
+ alias=alias,
  )
  )
  has_imported_literal = any(import_ == IMPORT_LITERAL for import_ in imports)
@@ -1102,6 +1106,24 @@ class Parser(ABC):
  models[i], models[i + 1] = models[i + 1], model
  changed = True

+ def __change_field_name(
+ self,
+ models: list[DataModel],
+ ) -> None:
+ if self.data_model_type != pydantic_model_v2.BaseModel:
+ return
+ for model in models:
+ for field in model.fields:
+ filed_name = field.name
+ filed_name_resolver = ModelResolver(snake_case_field=self.snake_case_field, remove_suffix_number=True)
+ for data_type in field.data_type.all_data_types:
+ if data_type.reference:
+ filed_name_resolver.exclude_names.add(data_type.reference.short_name)
+ new_filed_name = filed_name_resolver.add(["field"], cast("str", filed_name)).name
+ if filed_name != new_filed_name:
+ field.alias = filed_name
+ field.name = new_filed_name
+
  def __set_one_literal_on_default(self, models: list[DataModel]) -> None:
  if not self.use_one_literal_as_default:
  return
@@ -1265,6 +1287,7 @@ class Parser(ABC):
  self.__collapse_root_models(models, unused_models, imports, scoped_model_resolver)
  self.__set_default_enum_member(models)
  self.__sort_models(models, imports)
+ self.__change_field_name(models)
  self.__apply_discriminator_type(models, imports)
  self.__set_one_literal_on_default(models)

@@ -1328,7 +1351,7 @@ class Parser(ABC):
  results = {tuple(i.replace("-", "_") for i in k): v for k, v in results.items()}
  return (
  self.__postprocess_result_modules(results)
- if self.
+ if self.treat_dot_as_module
  else {
  tuple((part[: part.rfind(".")].replace(".", "_") + part[part.rfind(".") :]) for part in k): v
  for k, v in results.items()
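The discriminator hunks above run the OpenAPI propertyName through the model resolver before it is used, so a tag property that is not a valid (or not the preferred) Python field name ends up as a sanitized field with an alias carrying the original name. A rough, hand-written pydantic v2 sketch of the shape this targets, using a hypothetical "pet-type" discriminator (not generator output):

    from typing import Literal, Union

    from pydantic import BaseModel, Field

    class Cat(BaseModel):
        # sanitized field name, original propertyName preserved as the alias
        pet_type: Literal["cat"] = Field(..., alias="pet-type")

    class Dog(BaseModel):
        pet_type: Literal["dog"] = Field(..., alias="pet-type")

    class PetHolder(BaseModel):
        pet: Union[Cat, Dog] = Field(..., discriminator="pet_type")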
datamodel_code_generator/parser/graphql.py
@@ -147,7 +147,7 @@ class GraphQLParser(Parser):
  custom_formatters_kwargs: dict[str, Any] | None = None,
  use_pendulum: bool = False,
  http_query_parameters: Sequence[tuple[str, str]] | None = None,
-
+ treat_dot_as_module: bool = False,
  use_exact_imports: bool = False,
  default_field_extras: dict[str, Any] | None = None,
  target_datetime_class: DatetimeClassType = DatetimeClassType.Datetime,
@@ -221,7 +221,7 @@ class GraphQLParser(Parser):
  custom_formatters_kwargs=custom_formatters_kwargs,
  use_pendulum=use_pendulum,
  http_query_parameters=http_query_parameters,
-
+ treat_dot_as_module=treat_dot_as_module,
  use_exact_imports=use_exact_imports,
  default_field_extras=default_field_extras,
  target_datetime_class=target_datetime_class,
@@ -453,6 +453,7 @@ class GraphQLParser(Parser):
  path=self.current_source_path,
  description=obj.description,
  keyword_only=self.keyword_only,
+ treat_dot_as_module=self.treat_dot_as_module,
  )
  self.results.append(data_model_type)

datamodel_code_generator/parser/jsonschema.py
@@ -416,7 +416,7 @@ class JsonSchemaParser(Parser):
  custom_formatters_kwargs: dict[str, Any] | None = None,
  use_pendulum: bool = False,
  http_query_parameters: Sequence[tuple[str, str]] | None = None,
-
+ treat_dot_as_module: bool = False,
  use_exact_imports: bool = False,
  default_field_extras: dict[str, Any] | None = None,
  target_datetime_class: DatetimeClassType = DatetimeClassType.Datetime,
@@ -490,7 +490,7 @@ class JsonSchemaParser(Parser):
  custom_formatters_kwargs=custom_formatters_kwargs,
  use_pendulum=use_pendulum,
  http_query_parameters=http_query_parameters,
-
+ treat_dot_as_module=treat_dot_as_module,
  use_exact_imports=use_exact_imports,
  default_field_extras=default_field_extras,
  target_datetime_class=target_datetime_class,
@@ -695,7 +695,11 @@ class JsonSchemaParser(Parser):
  required: list[str],
  ) -> DataType:
  if obj.properties:
- fields.extend(
+ fields.extend(
+ self.parse_object_fields(
+ obj, path, get_module_name(name, None, treat_dot_as_module=self.treat_dot_as_module)
+ )
+ )
  # ignore an undetected object
  if ignore_duplicate_model and not fields and len(base_classes) == 1:
  with self.model_resolver.current_base_path_context(self.model_resolver._base_path):  # noqa: SLF001
@@ -737,6 +741,7 @@ class JsonSchemaParser(Parser):
  path=self.current_source_path,
  description=obj.description if self.use_schema_description else None,
  keyword_only=self.keyword_only,
+ treat_dot_as_module=self.treat_dot_as_module,
  )
  self.results.append(data_model_type)

@@ -756,7 +761,7 @@ class JsonSchemaParser(Parser):
  if all_of_item.ref:  # $ref
  base_classes.append(self.model_resolver.add_ref(all_of_item.ref))
  else:
- module_name = get_module_name(name, None)
+ module_name = get_module_name(name, None, treat_dot_as_module=self.treat_dot_as_module)
  object_fields = self.parse_object_fields(
  all_of_item,
  path,
@@ -849,6 +854,7 @@ class JsonSchemaParser(Parser):
  path=self.current_source_path,
  description=obj.description if self.use_schema_description else None,
  nullable=obj.type_has_null,
+ treat_dot_as_module=self.treat_dot_as_module,
  )
  self.results.append(data_model_root)
  return self.data_type(reference=reference)
@@ -935,7 +941,9 @@ class JsonSchemaParser(Parser):
  )
  class_name = reference.name
  self.set_title(class_name, obj)
- fields = self.parse_object_fields(
+ fields = self.parse_object_fields(
+ obj, path, get_module_name(class_name, None, treat_dot_as_module=self.treat_dot_as_module)
+ )
  if fields or not isinstance(obj.additionalProperties, JsonSchemaObject):
  data_model_type_class = self.data_model_type
  else:
@@ -972,6 +980,7 @@ class JsonSchemaParser(Parser):
  description=obj.description if self.use_schema_description else None,
  nullable=obj.type_has_null,
  keyword_only=self.keyword_only,
+ treat_dot_as_module=self.treat_dot_as_module,
  )
  self.results.append(data_model_type)
  return self.data_type(reference=reference)
@@ -1185,6 +1194,7 @@ class JsonSchemaParser(Parser):
  path=self.current_source_path,
  description=obj.description if self.use_schema_description else None,
  nullable=obj.type_has_null,
+ treat_dot_as_module=self.treat_dot_as_module,
  )
  self.results.append(data_model_root)
  return self.data_type(reference=reference)
@@ -1264,6 +1274,8 @@ class JsonSchemaParser(Parser):
  extra_template_data=self.extra_template_data,
  path=self.current_source_path,
  nullable=obj.type_has_null,
+ treat_dot_as_module=self.treat_dot_as_module,
+ default=obj.default if obj.has_default else UNDEFINED,
  )
  self.results.append(data_model_root_type)
  return self.data_type(reference=reference)
@@ -1386,6 +1398,7 @@ class JsonSchemaParser(Parser):
  path=self.current_source_path,
  default=obj.default if obj.has_default else UNDEFINED,
  nullable=obj.type_has_null,
+ treat_dot_as_module=self.treat_dot_as_module,
  )
  self.results.append(data_model_root_type)
  return self.data_type(reference=reference)
datamodel_code_generator/parser/openapi.py
@@ -209,7 +209,7 @@ class OpenAPIParser(JsonSchemaParser):
  custom_formatters_kwargs: dict[str, Any] | None = None,
  use_pendulum: bool = False,
  http_query_parameters: Sequence[tuple[str, str]] | None = None,
-
+ treat_dot_as_module: bool = False,
  use_exact_imports: bool = False,
  default_field_extras: dict[str, Any] | None = None,
  target_datetime_class: DatetimeClassType = DatetimeClassType.Datetime,
@@ -283,7 +283,7 @@ class OpenAPIParser(JsonSchemaParser):
  custom_formatters_kwargs=custom_formatters_kwargs,
  use_pendulum=use_pendulum,
  http_query_parameters=http_query_parameters,
-
+ treat_dot_as_module=treat_dot_as_module,
  use_exact_imports=use_exact_imports,
  default_field_extras=default_field_extras,
  target_datetime_class=target_datetime_class,
@@ -488,6 +488,7 @@ class OpenAPIParser(JsonSchemaParser):
  custom_base_class=self.base_class,
  custom_template_dir=self.custom_template_dir,
  keyword_only=self.keyword_only,
+ treat_dot_as_module=self.treat_dot_as_module,
  )
  )

datamodel_code_generator/reference.py
@@ -317,6 +317,7 @@ class ModelResolver:  # noqa: PLR0904
  remove_special_field_name_prefix: bool = False,  # noqa: FBT001, FBT002
  capitalise_enum_members: bool = False,  # noqa: FBT001, FBT002
  no_alias: bool = False,  # noqa: FBT001, FBT002
+ remove_suffix_number: bool = False,  # noqa: FBT001, FBT002
  ) -> None:
  self.references: dict[str, Reference] = {}
  self._current_root: Sequence[str] = []
@@ -349,6 +350,7 @@ class ModelResolver:  # noqa: PLR0904
  self.class_name_generator = custom_class_name_generator or self.default_class_name_generator
  self._base_path: Path = base_path or Path.cwd()
  self._current_base_path: Path | None = self._base_path
+ self.remove_suffix_number: bool = remove_suffix_number

  @property
  def current_base_path(self) -> Path | None:
@@ -615,7 +617,7 @@ class ModelResolver:  # noqa: PLR0904

  def _get_unique_name(self, name: str, camel: bool = False) -> str:  # noqa: FBT001, FBT002
  unique_name: str = name
- count: int = 1
+ count: int = 0 if self.remove_suffix_number else 1
  reference_names = {r.name for r in self.references.values()} | self.exclude_names
  while unique_name in reference_names:
  if self.duplicate_name_suffix:
@@ -627,7 +629,7 @@ class ModelResolver:  # noqa: PLR0904
  else:
  name_parts = [name, count]
  delimiter = "" if camel else "_"
- unique_name = delimiter.join(str(p) for p in name_parts if p)
+ unique_name = delimiter.join(str(p) for p in name_parts if p) if count else name
  count += 1
  return unique_name

datamodel_code_generator/types.py
@@ -167,26 +167,67 @@ def _remove_none_from_type(type_: str, split_pattern: Pattern[str], delimiter: s
  return types


- def _remove_none_from_union(type_: str, use_union_operator: bool) -> str:  # noqa:
+ def _remove_none_from_union(type_: str, *, use_union_operator: bool) -> str:  # noqa: PLR0911, PLR0912
  if use_union_operator:
- if
+ if " | " not in type_:
  return type_
- return UNION_OPERATOR_DELIMITER.join(
- _remove_none_from_type(type_, UNION_OPERATOR_PATTERN, UNION_OPERATOR_DELIMITER)
- )

+ # Process each part of the union
+ parts = UNION_OPERATOR_PATTERN.split(type_)
+ processed_parts = []
+ for part in parts:
+ if part == NONE:
+ continue
+
+ # Check if this part contains a nested union
+ processed_part = _remove_none_from_union(part, use_union_operator=True) if " | " in part else part
+ processed_parts.append(processed_part)
+
+ if not processed_parts:
+ return NONE
+
+ return UNION_OPERATOR_DELIMITER.join(processed_parts)
  if not type_.startswith(UNION_PREFIX):
  return type_
- inner_types = _remove_none_from_type(type_[len(UNION_PREFIX) :][:-1], UNION_PATTERN, UNION_DELIMITER)

-
-
-
+ inner_text = type_[len(UNION_PREFIX) : -1]
+ parts = []
+ inner_count = 0
+ current_part = ""
+
+ # Parse union parts carefully to handle nested structures
+ for char in inner_text:
+ current_part += char
+ if char == "[":
+ inner_count += 1
+ elif char == "]":
+ inner_count -= 1
+ elif char == "," and inner_count == 0:
+ part = current_part[:-1].strip()
+ if part != NONE:
+ # Process nested unions recursively
+ if part.startswith(UNION_PREFIX):
+ part = _remove_none_from_union(part, use_union_operator=False)
+ parts.append(part)
+ current_part = ""
+
+ part = current_part.strip()
+ if current_part and part != NONE:
+ if part.startswith(UNION_PREFIX):
+ part = _remove_none_from_union(part, use_union_operator=False)
+ parts.append(part)
+
+ if not parts:
+ return NONE
+ if len(parts) == 1:
+ return parts[0]
+
+ return f"{UNION_PREFIX}{UNION_DELIMITER.join(parts)}]"


  @lru_cache
  def get_optional_type(type_: str, use_union_operator: bool) -> str:  # noqa: FBT001
- type_ = _remove_none_from_union(type_, use_union_operator)
+ type_ = _remove_none_from_union(type_, use_union_operator=use_union_operator)

  if not type_ or type_ == NONE:
  return NONE
@@ -249,6 +290,7 @@ class DataType(_BaseModel):
  children: list[Any] = []  # noqa: RUF012
  strict: bool = False
  dict_key: Optional[DataType] = None  # noqa: UP045
+ treat_dot_as_module: bool = False

  _exclude_fields: ClassVar[set[str]] = {"parent", "children"}
  _pass_fields: ClassVar[set[str]] = {"parent", "children", "data_types", "reference"}
@@ -402,7 +444,9 @@ class DataType(_BaseModel):
  self.is_optional = True
  continue

- non_optional_data_type_type = _remove_none_from_union(
+ non_optional_data_type_type = _remove_none_from_union(
+ data_type_type, use_union_operator=self.use_union_operator
+ )

  if non_optional_data_type_type != data_type_type:
  self.is_optional = True
@@ -526,6 +570,7 @@ class DataTypeManager(ABC):
  use_union_operator: bool = False,  # noqa: FBT001, FBT002
  use_pendulum: bool = False,  # noqa: FBT001, FBT002
  target_datetime_class: DatetimeClassType | None = None,
+ treat_dot_as_module: bool = False,  # noqa: FBT001, FBT002
  ) -> None:
  self.python_version = python_version
  self.use_standard_collections: bool = use_standard_collections
@@ -537,6 +582,7 @@ class DataTypeManager(ABC):
  self.use_union_operator: bool = use_union_operator
  self.use_pendulum: bool = use_pendulum
  self.target_datetime_class: DatetimeClassType = target_datetime_class or DatetimeClassType.Datetime
+ self.treat_dot_as_module: bool = treat_dot_as_module

  if TYPE_CHECKING:
  self.data_type: type[DataType]
@@ -547,6 +593,7 @@ class DataTypeManager(ABC):
  use_standard_collections=(bool, use_standard_collections),
  use_generic_container=(bool, use_generic_container_types),
  use_union_operator=(bool, use_union_operator),
+ treat_dot_as_module=(bool, treat_dot_as_module),
  __base__=DataType,
  )

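The rewritten _remove_none_from_union above strips None only at the top level of a union, tracking bracket depth and recursing into nested unions instead of splitting naively. The observable behaviour through the public get_optional_type helper, assuming the 0.28.5 wheel; expected results for these invented inputs are shown as comments:

    from datamodel_code_generator.types import get_optional_type

    get_optional_type("Union[str, None]", False)              # -> "Optional[str]"
    get_optional_type("str | None", True)                     # -> "str | None"
    get_optional_type("Union[str, Union[int, None]]", False)  # -> "Optional[Union[str, int]]"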
{datamodel_code_generator-0.28.4.dist-info → datamodel_code_generator-0.28.5.dist-info}/METADATA
RENAMED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: datamodel-code-generator
- Version: 0.28.4
+ Version: 0.28.5
  Summary: Datamodel Code Generator
  Project-URL: Homepage, https://github.com/koxudaxi/datamodel-code-generator
  Project-URL: Source, https://github.com/koxudaxi/datamodel-code-generator
{datamodel_code_generator-0.28.4.dist-info → datamodel_code_generator-0.28.5.dist-info}/RECORD
RENAMED
@@ -1,33 +1,33 @@
- datamodel_code_generator/__init__.py,sha256=
- datamodel_code_generator/__main__.py,sha256=
+ datamodel_code_generator/__init__.py,sha256=sguYwixd5tfTNpa0TwfKFiYqEgwunUaVY2Aea7mCavk,20327
+ datamodel_code_generator/__main__.py,sha256=LMD988WjkqmzWjHXoNiiEI3KFnrq3kupbyiObDf1HpA,22137
  datamodel_code_generator/arguments.py,sha256=LQyCC7tsDdy7ie7nbQVVW_79usVumX8O5pd99ZZ51ds,16466
  datamodel_code_generator/format.py,sha256=zvX0KH1uWwGnTYoVM4KhAuKZn5erjkH5eyi4t3leirw,8962
  datamodel_code_generator/http.py,sha256=LE94GC7I9D8lWIg_YAGWedfy0XNxOXTmiYKuNMTwouo,887
  datamodel_code_generator/imports.py,sha256=Nq83WbEGCegntg3WX4VbKfzAIs84alZ7IrYyNPrlUbc,5517
  datamodel_code_generator/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  datamodel_code_generator/pydantic_patch.py,sha256=co1IUDvZqQ-xEZ3C9gbV-BVm2Cin1vfyZNr2Dr0LdHY,718
- datamodel_code_generator/reference.py,sha256=
- datamodel_code_generator/types.py,sha256=
+ datamodel_code_generator/reference.py,sha256=cnD6gGevRomez1dr16-KBz-GgDN8gCMQZqpwmb16CwA,26021
+ datamodel_code_generator/types.py,sha256=fobUZnNTOGpzF4qZMraoLogVkAU7zBdFVG-8SOFoDD4,21163
  datamodel_code_generator/util.py,sha256=mZW8-6CbFe6T4IY5OM9Av6cH-0VknQGe2eIKjTM6Jzo,2729
  datamodel_code_generator/model/__init__.py,sha256=pJlJ1juQ-Gv17ZKXy6OAfJSSoOAmYQ7QCbdneu1BENU,3594
- datamodel_code_generator/model/base.py,sha256=
- datamodel_code_generator/model/dataclass.py,sha256=
- datamodel_code_generator/model/enum.py,sha256=
+ datamodel_code_generator/model/base.py,sha256=Ma28Vx1p5zsFt23BMCs1UNDXuBq_USEhuPUvaeFLkS4,14959
+ datamodel_code_generator/model/dataclass.py,sha256=t4NtgVhopTtEDUJCqijJheVlewSt8IJzQhQb8gXspfs,6252
+ datamodel_code_generator/model/enum.py,sha256=yriQslY1hag_Qk-Xv3vl_LkPnbmMZ3iRTAGiiyMN0Io,4003
  datamodel_code_generator/model/imports.py,sha256=PTc09UzIBSsa5yAPoieb6hCGIohU2T1Y7igNy_pYarg,820
- datamodel_code_generator/model/msgspec.py,sha256=
+ datamodel_code_generator/model/msgspec.py,sha256=qL2DIEwBfpn-vd8p8KEmUViMUce6RgI4Ql-drOmPR7M,11845
  datamodel_code_generator/model/rootmodel.py,sha256=pY8G2SPjkafzfJ1L9P5sNdp8qe45UclpUYN86guRB3M,202
- datamodel_code_generator/model/scalar.py,sha256=
- datamodel_code_generator/model/typed_dict.py,sha256=
- datamodel_code_generator/model/types.py,sha256=
- datamodel_code_generator/model/union.py,sha256=
+ datamodel_code_generator/model/scalar.py,sha256=xfONEK30eYJ2mSL9PK9zXqEG5-xApYMI_gmKOn5qhK4,2664
+ datamodel_code_generator/model/typed_dict.py,sha256=FJi_fEZWuFe3nvidfl-jqr9PMRFTvfusoEFQkx1BqHI,4685
+ datamodel_code_generator/model/types.py,sha256=ZyEwi76EBI5RS5JfoNUoRHmjOAZDup8oNFvQQDizfwQ,3502
+ datamodel_code_generator/model/union.py,sha256=zwq1ayGFW3KbI4SxPCcdZcrM7X4Px25IdujDedtwgOw,1929
  datamodel_code_generator/model/pydantic/__init__.py,sha256=CtyzSriGEYGp1yfHapjD5lrS2vkSNe8AqKSYO-XaRWc,1095
- datamodel_code_generator/model/pydantic/base_model.py,sha256=
+ datamodel_code_generator/model/pydantic/base_model.py,sha256=qlGTxWYPS3XsHYwrRKjJDHvCixAxpWMEXTPtyAVXb6g,12182
  datamodel_code_generator/model/pydantic/custom_root_type.py,sha256=VJpEAmGFe3TzMKrR5YvR7PJ3pfGHcYytO1zhQrWyoWg,299
  datamodel_code_generator/model/pydantic/dataclass.py,sha256=jgjkqQk71CQP4RbTcPGSEOQDNqjTQnzFavvl5LjWTBw,455
  datamodel_code_generator/model/pydantic/imports.py,sha256=nWPiLgDeYNPHcAs8M-gaUUZg1daQRHdBPpjYuX3b5u4,2225
- datamodel_code_generator/model/pydantic/types.py,sha256=
+ datamodel_code_generator/model/pydantic/types.py,sha256=ttTiDsQ6FV3h4C_NTEhvPUmUpeqxBNQt-DJJFpKZS8s,13356
  datamodel_code_generator/model/pydantic_v2/__init__.py,sha256=xsfYcIUA2S8XzPIsYQSzDuBYZ1XRicfhGLHlQBlZwsg,1226
- datamodel_code_generator/model/pydantic_v2/base_model.py,sha256=
+ datamodel_code_generator/model/pydantic_v2/base_model.py,sha256=J_DxR6Auw0L-zHn0F5l9K8XtSmfEvDT26Bj-VZxihiE,8353
  datamodel_code_generator/model/pydantic_v2/imports.py,sha256=K3XD2kF9YCKmo5_7b2ipV5bGUrjz0avS-SiyDMVIpF0,299
  datamodel_code_generator/model/pydantic_v2/root_model.py,sha256=H4rwtg56N65-I3QHlPvlNhDcSPV0m56KSAgfGmxYXAQ,888
  datamodel_code_generator/model/pydantic_v2/types.py,sha256=apEuIhCBa15WdwGA0R9cqjpiH8mTLgAgu02CjcU4Css,2064
@@ -48,12 +48,12 @@ datamodel_code_generator/model/template/pydantic_v2/BaseModel.jinja2,sha256=XdSC
  datamodel_code_generator/model/template/pydantic_v2/ConfigDict.jinja2,sha256=xHvBYrh__32O1xRCSl6_u5zbyYIjB8a5k8fZiTo0spY,149
  datamodel_code_generator/model/template/pydantic_v2/RootModel.jinja2,sha256=XQBlML7Hm5hN6_AExENNvVc_yxNWijcIfTTbbmegCpE,1223
  datamodel_code_generator/parser/__init__.py,sha256=3XtFcDPocaetfjmWFqj_CubqNCDipb7vXZHsYKdJXXU,851
- datamodel_code_generator/parser/base.py,sha256=
- datamodel_code_generator/parser/graphql.py,sha256=
- datamodel_code_generator/parser/jsonschema.py,sha256=
- datamodel_code_generator/parser/openapi.py,sha256=
- datamodel_code_generator-0.28.
- datamodel_code_generator-0.28.
- datamodel_code_generator-0.28.
- datamodel_code_generator-0.28.
- datamodel_code_generator-0.28.
+ datamodel_code_generator/parser/base.py,sha256=COceRiOHtaOOS0gvDvKLNboxD4KY0ofJdmSdoyPBfNw,60618
+ datamodel_code_generator/parser/graphql.py,sha256=ODbkMp42fYh8kH81KNeAffcFTmJb6_hgMyCsLUmdpMo,22585
+ datamodel_code_generator/parser/jsonschema.py,sha256=J5Ec5Vf4LGjCRWPraBmpTkepq-tN0ri5MKB9bUSDdFc,69912
+ datamodel_code_generator/parser/openapi.py,sha256=MfHSVfwq7qx_YEOiNel-ABbwISdH-kp87Q09WeNr_5w,27180
+ datamodel_code_generator-0.28.5.dist-info/METADATA,sha256=UVX-nQFVHIBUGD1dqfDNsZ4qw4eOyiOsQI8MzkGrKUs,25189
+ datamodel_code_generator-0.28.5.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+ datamodel_code_generator-0.28.5.dist-info/entry_points.txt,sha256=cJVcHiEViQMANaoM5C1xR5hzmyCqH6hHHMpV8W00in8,77
+ datamodel_code_generator-0.28.5.dist-info/licenses/LICENSE,sha256=K54Lwc6_jduycsy8oFFjQEeSSuEiqvVIjCGIXOMnuTQ,1068
+ datamodel_code_generator-0.28.5.dist-info/RECORD,,
{datamodel_code_generator-0.28.4.dist-info → datamodel_code_generator-0.28.5.dist-info}/WHEEL
RENAMED
File without changes
{datamodel_code_generator-0.28.4.dist-info → datamodel_code_generator-0.28.5.dist-info}/entry_points.txt
RENAMED
File without changes
{datamodel_code_generator-0.28.4.dist-info → datamodel_code_generator-0.28.5.dist-info}/licenses/LICENSE
RENAMED
File without changes