datamodel-code-generator 0.28.3__py3-none-any.whl → 0.28.5__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- datamodel_code_generator/__init__.py +11 -3
- datamodel_code_generator/__main__.py +5 -1
- datamodel_code_generator/arguments.py +8 -1
- datamodel_code_generator/format.py +43 -2
- datamodel_code_generator/model/base.py +18 -6
- datamodel_code_generator/model/dataclass.py +4 -0
- datamodel_code_generator/model/enum.py +3 -1
- datamodel_code_generator/model/msgspec.py +4 -0
- datamodel_code_generator/model/pydantic/base_model.py +4 -0
- datamodel_code_generator/model/pydantic/types.py +2 -0
- datamodel_code_generator/model/pydantic_v2/base_model.py +2 -0
- datamodel_code_generator/model/scalar.py +2 -0
- datamodel_code_generator/model/typed_dict.py +2 -0
- datamodel_code_generator/model/types.py +2 -0
- datamodel_code_generator/model/union.py +2 -0
- datamodel_code_generator/parser/base.py +43 -9
- datamodel_code_generator/parser/graphql.py +6 -3
- datamodel_code_generator/parser/jsonschema.py +21 -6
- datamodel_code_generator/parser/openapi.py +6 -3
- datamodel_code_generator/reference.py +4 -2
- datamodel_code_generator/types.py +58 -11
- {datamodel_code_generator-0.28.3.dist-info → datamodel_code_generator-0.28.5.dist-info}/METADATA +6 -1
- {datamodel_code_generator-0.28.3.dist-info → datamodel_code_generator-0.28.5.dist-info}/RECORD +26 -26
- {datamodel_code_generator-0.28.3.dist-info → datamodel_code_generator-0.28.5.dist-info}/WHEEL +0 -0
- {datamodel_code_generator-0.28.3.dist-info → datamodel_code_generator-0.28.5.dist-info}/entry_points.txt +0 -0
- {datamodel_code_generator-0.28.3.dist-info → datamodel_code_generator-0.28.5.dist-info}/licenses/LICENSE +0 -0
datamodel_code_generator/__init__.py

@@ -22,7 +22,13 @@ from urllib.parse import ParseResult
 import yaml

 import datamodel_code_generator.pydantic_patch  # noqa: F401
-from datamodel_code_generator.format import
+from datamodel_code_generator.format import (
+    DEFAULT_FORMATTERS,
+    DatetimeClassType,
+    Formatter,
+    PythonVersion,
+    PythonVersionMin,
+)
 from datamodel_code_generator.parser import DefaultPutDict, LiteralType
 from datamodel_code_generator.util import SafeLoader

@@ -274,12 +280,13 @@ def generate( # noqa: PLR0912, PLR0913, PLR0914, PLR0915
     custom_formatters_kwargs: dict[str, Any] | None = None,
     use_pendulum: bool = False,
     http_query_parameters: Sequence[tuple[str, str]] | None = None,
-
+    treat_dot_as_module: bool = False,
     use_exact_imports: bool = False,
     union_mode: UnionMode | None = None,
     output_datetime_class: DatetimeClassType | None = None,
     keyword_only: bool = False,
     no_alias: bool = False,
+    formatters: list[Formatter] = DEFAULT_FORMATTERS,
 ) -> None:
     remote_text_cache: DefaultPutDict[str, str] = DefaultPutDict()
     if isinstance(input_, str):

@@ -468,12 +475,13 @@ def generate( # noqa: PLR0912, PLR0913, PLR0914, PLR0915
         custom_formatters_kwargs=custom_formatters_kwargs,
         use_pendulum=use_pendulum,
         http_query_parameters=http_query_parameters,
-
+        treat_dot_as_module=treat_dot_as_module,
         use_exact_imports=use_exact_imports,
         default_field_extras=default_field_extras,
         target_datetime_class=output_datetime_class,
         keyword_only=keyword_only,
         no_alias=no_alias,
+        formatters=formatters,
         **kwargs,
     )

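The two new keyword arguments land directly on datamodel_code_generator.generate(). A minimal sketch of driving them from Python follows; the surrounding arguments (input_file_type, output, output_model_type) are assumptions based on the existing public API and are not part of this diff.

from pathlib import Path

from datamodel_code_generator import DataModelType, InputFileType, generate
from datamodel_code_generator.format import Formatter

generate(
    Path("api.yaml"),
    input_file_type=InputFileType.OpenAPI,
    output=Path("models.py"),
    output_model_type=DataModelType.PydanticV2BaseModel,
    # added above: choose the post-processing formatters explicitly
    formatters=[Formatter.RUFF_CHECK, Formatter.RUFF_FORMAT],
    # added above: keep dots in schema names as module separators
    treat_dot_as_module=True,
)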
datamodel_code_generator/__main__.py

@@ -36,7 +36,9 @@ from datamodel_code_generator import (
 )
 from datamodel_code_generator.arguments import DEFAULT_ENCODING, arg_parser, namespace
 from datamodel_code_generator.format import (
+    DEFAULT_FORMATTERS,
     DatetimeClassType,
+    Formatter,
     PythonVersion,
     PythonVersionMin,
     is_supported_in_black,

@@ -311,6 +313,7 @@ class Config(BaseModel):
     output_datetime_class: Optional[DatetimeClassType] = None  # noqa: UP045
     keyword_only: bool = False
     no_alias: bool = False
+    formatters: list[Formatter] = DEFAULT_FORMATTERS

     def merge_args(self, args: Namespace) -> None:
         set_args = {f: getattr(args, f) for f in self.get_fields() if getattr(args, f) is not None}

@@ -513,12 +516,13 @@ def main(args: Sequence[str] | None = None) -> Exit: # noqa: PLR0911, PLR0912,
            custom_formatters_kwargs=custom_formatters_kwargs,
            use_pendulum=config.use_pendulum,
            http_query_parameters=config.http_query_parameters,
-
+           treat_dot_as_module=config.treat_dot_as_module,
            use_exact_imports=config.use_exact_imports,
            union_mode=config.union_mode,
            output_datetime_class=config.output_datetime_class,
            keyword_only=config.keyword_only,
            no_alias=config.no_alias,
+           formatters=config.formatters,
        )
    except InvalidClassNameError as e:
        print(f"{e} You have to set `--class-name` option", file=sys.stderr)  # noqa: T201

datamodel_code_generator/arguments.py

@@ -6,7 +6,7 @@ from operator import attrgetter
 from typing import TYPE_CHECKING

 from datamodel_code_generator import DataModelType, InputFileType, OpenAPIScope
-from datamodel_code_generator.format import DatetimeClassType, PythonVersion
+from datamodel_code_generator.format import DatetimeClassType, Formatter, PythonVersion
 from datamodel_code_generator.model.pydantic_v2 import UnionMode
 from datamodel_code_generator.parser import LiteralType
 from datamodel_code_generator.types import StrictTypes

@@ -440,6 +440,13 @@ base_options.add_argument(
     type=str,
     default=None,
 )
+base_options.add_argument(
+    "--formatters",
+    help="Formatters for output (default: [black, isort])",
+    choices=[f.value for f in Formatter],
+    nargs="+",
+    default=None,
+)
 base_options.add_argument(
     "--custom-formatters",
     help="List of modules with custom formatter (delimited list input).",
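The --formatters flag accepts the string values of the Formatter enum added in format.py below. A small illustrative sketch of how those strings map back onto the enum; the conversion shown here is for illustration only (on the real code path the parsed strings are coerced by the Config.formatters field):

from datamodel_code_generator.format import Formatter

selected = ["ruff-check", "ruff-format"]  # e.g. what argparse collects for --formatters
formatters = [Formatter(value) for value in selected]
assert formatters == [Formatter.RUFF_CHECK, Formatter.RUFF_FORMAT]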
datamodel_code_generator/format.py

@@ -1,5 +1,6 @@
 from __future__ import annotations

+import subprocess  # noqa: S404
 from enum import Enum
 from functools import cached_property
 from importlib import import_module

@@ -89,6 +90,16 @@ def black_find_project_root(sources: Sequence[Path]) -> Path:
     return project_root


+class Formatter(Enum):
+    BLACK = "black"
+    ISORT = "isort"
+    RUFF_CHECK = "ruff-check"
+    RUFF_FORMAT = "ruff-format"
+
+
+DEFAULT_FORMATTERS = [Formatter.BLACK, Formatter.ISORT]
+
+
 class CodeFormatter:
     def __init__(  # noqa: PLR0912, PLR0913, PLR0917
         self,

@@ -99,6 +110,8 @@ class CodeFormatter:
         known_third_party: list[str] | None = None,
         custom_formatters: list[str] | None = None,
         custom_formatters_kwargs: dict[str, Any] | None = None,
+        encoding: str = "utf-8",
+        formatters: list[Formatter] = DEFAULT_FORMATTERS,
     ) -> None:
         if not settings_path:
             settings_path = Path.cwd()

@@ -158,6 +171,8 @@ class CodeFormatter:

         self.custom_formatters_kwargs = custom_formatters_kwargs or {}
         self.custom_formatters = self._check_custom_formatters(custom_formatters)
+        self.encoding = encoding
+        self.formatters = formatters

     def _load_custom_formatter(self, custom_formatter_import: str) -> CustomCodeFormatter:
         import_ = import_module(custom_formatter_import)

@@ -184,8 +199,16 @@ class CodeFormatter:
         self,
         code: str,
     ) -> str:
-
-
+        if Formatter.ISORT in self.formatters:
+            code = self.apply_isort(code)
+        if Formatter.BLACK in self.formatters:
+            code = self.apply_black(code)
+
+        if Formatter.RUFF_CHECK in self.formatters:
+            code = self.apply_ruff_lint(code)
+
+        if Formatter.RUFF_FORMAT in self.formatters:
+            code = self.apply_ruff_formatter(code)

         for formatter in self.custom_formatters:
             code = formatter.apply(code)

@@ -198,6 +221,24 @@ class CodeFormatter:
             mode=self.black_mode,
         )

+    def apply_ruff_lint(self, code: str) -> str:
+        result = subprocess.run(  # noqa: S603
+            ("ruff", "check", "--fix", "-"),
+            input=code.encode(self.encoding),
+            capture_output=True,
+            check=False,
+        )
+        return result.stdout.decode(self.encoding)
+
+    def apply_ruff_formatter(self, code: str) -> str:
+        result = subprocess.run(  # noqa: S603
+            ("ruff", "format", "-"),
+            input=code.encode(self.encoding),
+            capture_output=True,
+            check=False,
+        )
+        return result.stdout.decode(self.encoding)
+
     if TYPE_CHECKING:

         def apply_isort(self, code: str) -> str: ...
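The two new apply_ruff_* methods pipe the generated source through the ruff binary on stdin ("-") and read the rewritten code back from stdout. A standalone sketch of the same pattern; it assumes ruff is installed and on PATH, and with check=False a ruff failure simply yields whatever ruff wrote to stdout:

import subprocess

def ruff_format(code: str, encoding: str = "utf-8") -> str:
    # "-" tells ruff to read the source from stdin and write the result to stdout
    result = subprocess.run(
        ("ruff", "format", "-"),
        input=code.encode(encoding),
        capture_output=True,
        check=False,
    )
    return result.stdout.decode(encoding)

print(ruff_format("x=1;y=2\n"))  # prints the reformatted "x = 1" / "y = 2" source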
datamodel_code_generator/model/base.py

@@ -1,5 +1,6 @@
 from __future__ import annotations

+import re
 from abc import ABC, abstractmethod
 from collections import defaultdict
 from copy import deepcopy

@@ -203,18 +204,27 @@ def get_template(template_file_path: Path) -> Template:
     return environment.get_template(template_file_path.name)


-def
+def sanitize_module_name(name: str, *, treat_dot_as_module: bool) -> str:
+    pattern = r"[^0-9a-zA-Z_.]" if treat_dot_as_module else r"[^0-9a-zA-Z_]"
+    sanitized = re.sub(pattern, "_", name)
+    if sanitized and sanitized[0].isdigit():
+        sanitized = f"_{sanitized}"
+    return sanitized
+
+
+def get_module_path(name: str, file_path: Path | None, *, treat_dot_as_module: bool) -> list[str]:
     if file_path:
+        sanitized_stem = sanitize_module_name(file_path.stem, treat_dot_as_module=treat_dot_as_module)
         return [
             *file_path.parts[:-1],
-
+            sanitized_stem,
             *name.split(".")[:-1],
         ]
     return name.split(".")[:-1]


-def get_module_name(name: str, file_path: Path | None) -> str:
-    return ".".join(get_module_path(name, file_path))
+def get_module_name(name: str, file_path: Path | None, *, treat_dot_as_module: bool) -> str:
+    return ".".join(get_module_path(name, file_path, treat_dot_as_module=treat_dot_as_module))


 class TemplateBase(ABC):

@@ -265,6 +275,7 @@ class DataModel(TemplateBase, Nullable, ABC):
         default: Any = UNDEFINED,
         nullable: bool = False,
         keyword_only: bool = False,
+        treat_dot_as_module: bool = False,
     ) -> None:
         self.keyword_only = keyword_only
         if not self.TEMPLATE_FILE_PATH:

@@ -313,6 +324,7 @@ class DataModel(TemplateBase, Nullable, ABC):
         self._additional_imports.extend(self.DEFAULT_IMPORTS)
         self.default: Any = default
         self._nullable: bool = nullable
+        self._treat_dot_as_module: bool = treat_dot_as_module

     def _validate_fields(self, fields: list[DataModelFieldBase]) -> list[DataModelFieldBase]:
         names: set[str] = set()

@@ -393,11 +405,11 @@ class DataModel(TemplateBase, Nullable, ABC):

     @property
     def module_path(self) -> list[str]:
-        return get_module_path(self.name, self.file_path)
+        return get_module_path(self.name, self.file_path, treat_dot_as_module=self._treat_dot_as_module)

     @property
     def module_name(self) -> str:
-        return get_module_name(self.name, self.file_path)
+        return get_module_name(self.name, self.file_path, treat_dot_as_module=self._treat_dot_as_module)

     @property
     def all_data_types(self) -> Iterator[DataType]:
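The new sanitize_module_name() and the reworked get_module_name() above control how file stems and dotted schema names become module paths. The expected values below are traced by hand from the regexes shown in the diff, not quoted from the package's test suite:

from datamodel_code_generator.model.base import get_module_name, sanitize_module_name

sanitize_module_name("foo-bar.v1", treat_dot_as_module=True)   # 'foo_bar.v1' (dot kept as a package separator)
sanitize_module_name("foo-bar.v1", treat_dot_as_module=False)  # 'foo_bar_v1' (dot replaced like any other invalid character)
sanitize_module_name("1model", treat_dot_as_module=False)      # '_1model'   (leading digit gets an underscore prefix)
get_module_name("api.v1.Pet", None, treat_dot_as_module=True)  # 'api.v1'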
datamodel_code_generator/model/dataclass.py

@@ -53,6 +53,7 @@ class DataClass(DataModel):
         default: Any = UNDEFINED,
         nullable: bool = False,
         keyword_only: bool = False,
+        treat_dot_as_module: bool = False,
     ) -> None:
         super().__init__(
             reference=reference,

@@ -68,6 +69,7 @@ class DataClass(DataModel):
             default=default,
             nullable=nullable,
             keyword_only=keyword_only,
+            treat_dot_as_module=treat_dot_as_module,
         )


@@ -144,6 +146,7 @@ class DataTypeManager(_DataTypeManager):
         use_union_operator: bool = False,  # noqa: FBT001, FBT002
         use_pendulum: bool = False,  # noqa: FBT001, FBT002
         target_datetime_class: DatetimeClassType = DatetimeClassType.Datetime,
+        treat_dot_as_module: bool = False,  # noqa: FBT001, FBT002
     ) -> None:
         super().__init__(
             python_version,

@@ -154,6 +157,7 @@ class DataTypeManager(_DataTypeManager):
             use_union_operator,
             use_pendulum,
             target_datetime_class,
+            treat_dot_as_module,
         )

         datetime_map = (
datamodel_code_generator/model/enum.py

@@ -52,6 +52,7 @@ class Enum(DataModel):
         default: Any = UNDEFINED,
         nullable: bool = False,
         keyword_only: bool = False,
+        treat_dot_as_module: bool = False,
     ) -> None:
         super().__init__(
             reference=reference,

@@ -67,6 +68,7 @@ class Enum(DataModel):
             default=default,
             nullable=nullable,
             keyword_only=keyword_only,
+            treat_dot_as_module=treat_dot_as_module,
         )

         if not base_classes and type_:

@@ -91,7 +93,7 @@

         for field in self.fields:
             # Remove surrounding quotes from field default value
-            field_default = (field.default or "").strip("'\"")
+            field_default = str(field.default or "").strip("'\"")

             # Compare values after removing quotes
             if field_default == str_value:
datamodel_code_generator/model/msgspec.py

@@ -97,6 +97,7 @@ class Struct(DataModel):
         default: Any = UNDEFINED,
         nullable: bool = False,
         keyword_only: bool = False,
+        treat_dot_as_module: bool = False,
     ) -> None:
         super().__init__(
             reference=reference,

@@ -112,6 +113,7 @@
             default=default,
             nullable=nullable,
             keyword_only=keyword_only,
+            treat_dot_as_module=treat_dot_as_module,
         )
         self.extra_template_data.setdefault("base_class_kwargs", {})
         if self.keyword_only:

@@ -287,6 +289,7 @@ class DataTypeManager(_DataTypeManager):
         use_union_operator: bool = False,  # noqa: FBT001, FBT002
         use_pendulum: bool = False,  # noqa: FBT001, FBT002
         target_datetime_class: DatetimeClassType = DatetimeClassType.Datetime,
+        treat_dot_as_module: bool = False,  # noqa: FBT001, FBT002
     ) -> None:
         super().__init__(
             python_version,

@@ -297,6 +300,7 @@
             use_union_operator,
             use_pendulum,
             target_datetime_class,
+            treat_dot_as_module,
         )

         datetime_map = (
datamodel_code_generator/model/pydantic/base_model.py

@@ -213,6 +213,7 @@ class BaseModelBase(DataModel, ABC):
         default: Any = UNDEFINED,
         nullable: bool = False,
         keyword_only: bool = False,
+        treat_dot_as_module: bool = False,
     ) -> None:
         methods: list[str] = [field.method for field in fields if field.method]

@@ -230,6 +231,7 @@
             default=default,
             nullable=nullable,
             keyword_only=keyword_only,
+            treat_dot_as_module=treat_dot_as_module,
         )

     @cached_property

@@ -263,6 +265,7 @@ class BaseModel(BaseModelBase):
         default: Any = UNDEFINED,
         nullable: bool = False,
         keyword_only: bool = False,
+        treat_dot_as_module: bool = False,
     ) -> None:
         super().__init__(
             reference=reference,

@@ -277,6 +280,7 @@
             default=default,
             nullable=nullable,
             keyword_only=keyword_only,
+            treat_dot_as_module=treat_dot_as_module,
         )
         config_parameters: dict[str, Any] = {}

datamodel_code_generator/model/pydantic/types.py

@@ -166,6 +166,7 @@ class DataTypeManager(_DataTypeManager):
         use_union_operator: bool = False,  # noqa: FBT001, FBT002
         use_pendulum: bool = False,  # noqa: FBT001, FBT002
         target_datetime_class: DatetimeClassType | None = None,
+        treat_dot_as_module: bool = False,  # noqa: FBT001, FBT002
     ) -> None:
         super().__init__(
             python_version,

@@ -176,6 +177,7 @@
             use_union_operator,
             use_pendulum,
             target_datetime_class,
+            treat_dot_as_module,
         )

         self.type_map: dict[Types, DataType] = self.type_map_factory(
datamodel_code_generator/model/pydantic_v2/base_model.py

@@ -179,6 +179,7 @@ class BaseModel(BaseModelBase):
         default: Any = UNDEFINED,
         nullable: bool = False,
         keyword_only: bool = False,
+        treat_dot_as_module: bool = False,
     ) -> None:
         super().__init__(
             reference=reference,

@@ -193,6 +194,7 @@
             default=default,
             nullable=nullable,
             keyword_only=keyword_only,
+            treat_dot_as_module=treat_dot_as_module,
         )
         config_parameters: dict[str, Any] = {}

datamodel_code_generator/model/scalar.py

@@ -50,6 +50,7 @@ class DataTypeScalar(DataModel):
         default: Any = UNDEFINED,
         nullable: bool = False,
         keyword_only: bool = False,
+        treat_dot_as_module: bool = False,
     ) -> None:
         extra_template_data = extra_template_data or defaultdict(dict)

@@ -78,4 +79,5 @@
             default=default,
             nullable=nullable,
             keyword_only=keyword_only,
+            treat_dot_as_module=treat_dot_as_module,
         )
datamodel_code_generator/model/typed_dict.py

@@ -60,6 +60,7 @@ class TypedDict(DataModel):
         default: Any = UNDEFINED,
         nullable: bool = False,
         keyword_only: bool = False,
+        treat_dot_as_module: bool = False,
     ) -> None:
         super().__init__(
             reference=reference,

@@ -75,6 +76,7 @@
             default=default,
             nullable=nullable,
             keyword_only=keyword_only,
+            treat_dot_as_module=treat_dot_as_module,
         )

     @property
datamodel_code_generator/model/types.py

@@ -68,6 +68,7 @@ class DataTypeManager(_DataTypeManager):
         use_union_operator: bool = False,  # noqa: FBT001, FBT002
         use_pendulum: bool = False,  # noqa: FBT001, FBT002
         target_datetime_class: DatetimeClassType = DatetimeClassType.Datetime,
+        treat_dot_as_module: bool = False,  # noqa: FBT001, FBT002
     ) -> None:
         super().__init__(
             python_version,

@@ -78,6 +79,7 @@
             use_union_operator,
             use_pendulum,
             target_datetime_class,
+            treat_dot_as_module,
         )

         self.type_map: dict[Types, DataType] = type_map_factory(self.data_type)
datamodel_code_generator/model/union.py

@@ -37,6 +37,7 @@ class DataTypeUnion(DataModel):
         default: Any = UNDEFINED,
         nullable: bool = False,
         keyword_only: bool = False,
+        treat_dot_as_module: bool = False,
     ) -> None:
         super().__init__(
             reference=reference,

@@ -52,4 +53,5 @@
             default=default,
             nullable=nullable,
             keyword_only=keyword_only,
+            treat_dot_as_module=treat_dot_as_module,
         )
datamodel_code_generator/parser/base.py

@@ -7,12 +7,19 @@ from abc import ABC, abstractmethod
 from collections import OrderedDict, defaultdict
 from itertools import groupby
 from pathlib import Path
-from typing import TYPE_CHECKING, Any, Callable, NamedTuple, Optional, Protocol, TypeVar, runtime_checkable
+from typing import TYPE_CHECKING, Any, Callable, NamedTuple, Optional, Protocol, TypeVar, cast, runtime_checkable
 from urllib.parse import ParseResult

 from pydantic import BaseModel

-from datamodel_code_generator.format import
+from datamodel_code_generator.format import (
+    DEFAULT_FORMATTERS,
+    CodeFormatter,
+    DatetimeClassType,
+    Formatter,
+    PythonVersion,
+    PythonVersionMin,
+)
 from datamodel_code_generator.imports import (
     IMPORT_ANNOTATIONS,
     IMPORT_LITERAL,

@@ -363,12 +370,13 @@ class Parser(ABC):
         custom_formatters_kwargs: dict[str, Any] | None = None,
         use_pendulum: bool = False,
         http_query_parameters: Sequence[tuple[str, str]] | None = None,
-
+        treat_dot_as_module: bool = False,
         use_exact_imports: bool = False,
         default_field_extras: dict[str, Any] | None = None,
         target_datetime_class: DatetimeClassType | None = DatetimeClassType.Datetime,
         keyword_only: bool = False,
         no_alias: bool = False,
+        formatters: list[Formatter] = DEFAULT_FORMATTERS,
     ) -> None:
         self.keyword_only = keyword_only
         self.data_type_manager: DataTypeManager = data_type_manager_type(

@@ -379,6 +387,7 @@
             use_union_operator=use_union_operator,
             use_pendulum=use_pendulum,
             target_datetime_class=target_datetime_class,
+            treat_dot_as_module=treat_dot_as_module,
         )
         self.data_model_type: type[DataModel] = data_model_type
         self.data_model_root_type: type[DataModel] = data_model_root_type

@@ -474,8 +483,9 @@
         self.known_third_party = known_third_party
         self.custom_formatter = custom_formatters
         self.custom_formatters_kwargs = custom_formatters_kwargs
-        self.
+        self.treat_dot_as_module = treat_dot_as_module
         self.default_field_extras: dict[str, Any] | None = default_field_extras
+        self.formatters: list[Formatter] = formatters

     @property
     def iter_source(self) -> Iterator[Source]:

@@ -657,7 +667,7 @@
             if (
                 len(model.module_path) > 1
                 and model.module_path[-1].count(".") > 0
-                and not self.
+                and not self.treat_dot_as_module
             ):
                 rel_path_depth = model.module_path[-1].count(".")
                 from_ = from_[rel_path_depth:]

@@ -718,6 +728,8 @@
                 property_name = discriminator.get("propertyName")
                 if not property_name:  # pragma: no cover
                     continue
+                field_name, alias = self.model_resolver.get_valid_field_name_and_alias(field_name=property_name)
+                discriminator["propertyName"] = field_name
                 mapping = discriminator.get("mapping", {})
                 for data_type in field.data_type.data_types:
                     if not data_type.reference:  # pragma: no cover

@@ -769,13 +781,13 @@
                     raise RuntimeError(msg)
                 has_one_literal = False
                 for discriminator_field in discriminator_model.fields:
-                    if
+                    if field_name not in {discriminator_field.original_name, discriminator_field.name}:
                         continue
                     literals = discriminator_field.data_type.literals
                     if len(literals) == 1 and literals[0] == (type_names[0] if type_names else None):
                         has_one_literal = True
                         if isinstance(discriminator_model, msgspec_model.Struct):  # pragma: no cover
-                            discriminator_model.add_base_class_kwarg("tag_field", f"'{
+                            discriminator_model.add_base_class_kwarg("tag_field", f"'{field_name}'")
                             discriminator_model.add_base_class_kwarg("tag", discriminator_field.represented_default)
                             discriminator_field.extras["is_classvar"] = True
                         # Found the discriminator field, no need to keep looking

@@ -791,9 +803,10 @@
                 if not has_one_literal:
                     discriminator_model.fields.append(
                         self.data_model_field_type(
-                            name=
+                            name=field_name,
                             data_type=self.data_type(literals=type_names),
                             required=True,
+                            alias=alias,
                         )
                     )
         has_imported_literal = any(import_ == IMPORT_LITERAL for import_ in imports)

@@ -1093,6 +1106,24 @@
                     models[i], models[i + 1] = models[i + 1], model
                     changed = True

+    def __change_field_name(
+        self,
+        models: list[DataModel],
+    ) -> None:
+        if self.data_model_type != pydantic_model_v2.BaseModel:
+            return
+        for model in models:
+            for field in model.fields:
+                filed_name = field.name
+                filed_name_resolver = ModelResolver(snake_case_field=self.snake_case_field, remove_suffix_number=True)
+                for data_type in field.data_type.all_data_types:
+                    if data_type.reference:
+                        filed_name_resolver.exclude_names.add(data_type.reference.short_name)
+                new_filed_name = filed_name_resolver.add(["field"], cast("str", filed_name)).name
+                if filed_name != new_filed_name:
+                    field.alias = filed_name
+                    field.name = new_filed_name
+
     def __set_one_literal_on_default(self, models: list[DataModel]) -> None:
         if not self.use_one_literal_as_default:
             return

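The new __change_field_name pass renames a pydantic v2 field whose name collides with a referenced class name and keeps the original spelling as the alias. A heavily simplified, illustrative-only sketch of that idea follows; the Field class, the helper name, and the plain "_1" suffix are invented here, while the real pass delegates the renaming to ModelResolver with remove_suffix_number=True:

class Field:
    def __init__(self, name: str) -> None:
        self.name = name
        self.alias: str | None = None

def rename_colliding_field(field: Field, referenced_class_names: set[str]) -> None:
    # If the field name shadows a referenced model, rename it and keep the original as alias
    if field.name in referenced_class_names:
        field.alias = field.name
        field.name = f"{field.name}_1"  # simplification of the resolver's suffixing

field = Field("Pet")
rename_colliding_field(field, {"Pet"})
assert (field.name, field.alias) == ("Pet_1", "Pet")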
@@ -1173,6 +1204,8 @@
                 known_third_party=self.known_third_party,
                 custom_formatters=self.custom_formatter,
                 custom_formatters_kwargs=self.custom_formatters_kwargs,
+                encoding=self.encoding,
+                formatters=self.formatters,
             )
         else:
             code_formatter = None

@@ -1254,6 +1287,7 @@
         self.__collapse_root_models(models, unused_models, imports, scoped_model_resolver)
         self.__set_default_enum_member(models)
         self.__sort_models(models, imports)
+        self.__change_field_name(models)
         self.__apply_discriminator_type(models, imports)
         self.__set_one_literal_on_default(models)

@@ -1317,7 +1351,7 @@
         results = {tuple(i.replace("-", "_") for i in k): v for k, v in results.items()}
         return (
             self.__postprocess_result_modules(results)
-            if self.
+            if self.treat_dot_as_module
             else {
                 tuple((part[: part.rfind(".")].replace(".", "_") + part[part.rfind(".") :]) for part in k): v
                 for k, v in results.items()
datamodel_code_generator/parser/graphql.py

@@ -36,7 +36,7 @@ except ImportError as exc: # pragma: no cover
     raise Exception(msg) from exc  # noqa: TRY002


-from datamodel_code_generator.format import DatetimeClassType
+from datamodel_code_generator.format import DEFAULT_FORMATTERS, DatetimeClassType, Formatter

 if TYPE_CHECKING:
     from collections import defaultdict

@@ -147,12 +147,13 @@ class GraphQLParser(Parser):
         custom_formatters_kwargs: dict[str, Any] | None = None,
         use_pendulum: bool = False,
         http_query_parameters: Sequence[tuple[str, str]] | None = None,
-
+        treat_dot_as_module: bool = False,
         use_exact_imports: bool = False,
         default_field_extras: dict[str, Any] | None = None,
         target_datetime_class: DatetimeClassType = DatetimeClassType.Datetime,
         keyword_only: bool = False,
         no_alias: bool = False,
+        formatters: list[Formatter] = DEFAULT_FORMATTERS,
     ) -> None:
         super().__init__(
             source=source,

@@ -220,12 +221,13 @@
             custom_formatters_kwargs=custom_formatters_kwargs,
             use_pendulum=use_pendulum,
             http_query_parameters=http_query_parameters,
-
+            treat_dot_as_module=treat_dot_as_module,
             use_exact_imports=use_exact_imports,
             default_field_extras=default_field_extras,
             target_datetime_class=target_datetime_class,
             keyword_only=keyword_only,
             no_alias=no_alias,
+            formatters=formatters,
         )

         self.data_model_scalar_type = data_model_scalar_type

@@ -451,6 +453,7 @@
             path=self.current_source_path,
             description=obj.description,
             keyword_only=self.keyword_only,
+            treat_dot_as_module=self.treat_dot_as_module,
         )
         self.results.append(data_model_type)

datamodel_code_generator/parser/jsonschema.py

@@ -19,7 +19,7 @@ from datamodel_code_generator import (
     load_yaml_from_path,
     snooper_to_methods,
 )
-from datamodel_code_generator.format import PythonVersion, PythonVersionMin
+from datamodel_code_generator.format import DEFAULT_FORMATTERS, Formatter, PythonVersion, PythonVersionMin
 from datamodel_code_generator.model import DataModel, DataModelFieldBase
 from datamodel_code_generator.model import pydantic as pydantic_model
 from datamodel_code_generator.model.base import UNDEFINED, get_module_name

@@ -416,12 +416,13 @@ class JsonSchemaParser(Parser):
         custom_formatters_kwargs: dict[str, Any] | None = None,
         use_pendulum: bool = False,
         http_query_parameters: Sequence[tuple[str, str]] | None = None,
-
+        treat_dot_as_module: bool = False,
         use_exact_imports: bool = False,
         default_field_extras: dict[str, Any] | None = None,
         target_datetime_class: DatetimeClassType = DatetimeClassType.Datetime,
         keyword_only: bool = False,
         no_alias: bool = False,
+        formatters: list[Formatter] = DEFAULT_FORMATTERS,
     ) -> None:
         super().__init__(
             source=source,

@@ -489,12 +490,13 @@
             custom_formatters_kwargs=custom_formatters_kwargs,
             use_pendulum=use_pendulum,
             http_query_parameters=http_query_parameters,
-
+            treat_dot_as_module=treat_dot_as_module,
             use_exact_imports=use_exact_imports,
             default_field_extras=default_field_extras,
             target_datetime_class=target_datetime_class,
             keyword_only=keyword_only,
             no_alias=no_alias,
+            formatters=formatters,
         )

         self.remote_object_cache: DefaultPutDict[str, dict[str, Any]] = DefaultPutDict()

@@ -693,7 +695,11 @@
         required: list[str],
     ) -> DataType:
         if obj.properties:
-            fields.extend(
+            fields.extend(
+                self.parse_object_fields(
+                    obj, path, get_module_name(name, None, treat_dot_as_module=self.treat_dot_as_module)
+                )
+            )
         # ignore an undetected object
         if ignore_duplicate_model and not fields and len(base_classes) == 1:
             with self.model_resolver.current_base_path_context(self.model_resolver._base_path):  # noqa: SLF001

@@ -735,6 +741,7 @@
             path=self.current_source_path,
             description=obj.description if self.use_schema_description else None,
             keyword_only=self.keyword_only,
+            treat_dot_as_module=self.treat_dot_as_module,
         )
         self.results.append(data_model_type)

@@ -754,7 +761,7 @@
             if all_of_item.ref:  # $ref
                 base_classes.append(self.model_resolver.add_ref(all_of_item.ref))
             else:
-                module_name = get_module_name(name, None)
+                module_name = get_module_name(name, None, treat_dot_as_module=self.treat_dot_as_module)
                 object_fields = self.parse_object_fields(
                     all_of_item,
                     path,

@@ -847,6 +854,7 @@
             path=self.current_source_path,
             description=obj.description if self.use_schema_description else None,
             nullable=obj.type_has_null,
+            treat_dot_as_module=self.treat_dot_as_module,
         )
         self.results.append(data_model_root)
         return self.data_type(reference=reference)

@@ -933,7 +941,9 @@
         )
         class_name = reference.name
         self.set_title(class_name, obj)
-        fields = self.parse_object_fields(
+        fields = self.parse_object_fields(
+            obj, path, get_module_name(class_name, None, treat_dot_as_module=self.treat_dot_as_module)
+        )
         if fields or not isinstance(obj.additionalProperties, JsonSchemaObject):
             data_model_type_class = self.data_model_type
         else:

@@ -970,6 +980,7 @@
             description=obj.description if self.use_schema_description else None,
             nullable=obj.type_has_null,
             keyword_only=self.keyword_only,
+            treat_dot_as_module=self.treat_dot_as_module,
         )
         self.results.append(data_model_type)
         return self.data_type(reference=reference)

@@ -1183,6 +1194,7 @@
             path=self.current_source_path,
             description=obj.description if self.use_schema_description else None,
             nullable=obj.type_has_null,
+            treat_dot_as_module=self.treat_dot_as_module,
         )
         self.results.append(data_model_root)
         return self.data_type(reference=reference)

@@ -1262,6 +1274,8 @@
             extra_template_data=self.extra_template_data,
             path=self.current_source_path,
             nullable=obj.type_has_null,
+            treat_dot_as_module=self.treat_dot_as_module,
+            default=obj.default if obj.has_default else UNDEFINED,
         )
         self.results.append(data_model_root_type)
         return self.data_type(reference=reference)

@@ -1384,6 +1398,7 @@
             path=self.current_source_path,
             default=obj.default if obj.has_default else UNDEFINED,
             nullable=obj.type_has_null,
+            treat_dot_as_module=self.treat_dot_as_module,
         )
         self.results.append(data_model_root_type)
         return self.data_type(reference=reference)
datamodel_code_generator/parser/openapi.py

@@ -18,7 +18,7 @@ from datamodel_code_generator import (
     load_yaml,
     snooper_to_methods,
 )
-from datamodel_code_generator.format import DatetimeClassType
+from datamodel_code_generator.format import DEFAULT_FORMATTERS, DatetimeClassType, Formatter
 from datamodel_code_generator.model import DataModel, DataModelFieldBase
 from datamodel_code_generator.model import pydantic as pydantic_model
 from datamodel_code_generator.parser import DefaultPutDict  # noqa: TC001 # needed for type check

@@ -209,12 +209,13 @@ class OpenAPIParser(JsonSchemaParser):
         custom_formatters_kwargs: dict[str, Any] | None = None,
         use_pendulum: bool = False,
         http_query_parameters: Sequence[tuple[str, str]] | None = None,
-
+        treat_dot_as_module: bool = False,
         use_exact_imports: bool = False,
         default_field_extras: dict[str, Any] | None = None,
         target_datetime_class: DatetimeClassType = DatetimeClassType.Datetime,
         keyword_only: bool = False,
         no_alias: bool = False,
+        formatters: list[Formatter] = DEFAULT_FORMATTERS,
     ) -> None:
         super().__init__(
             source=source,

@@ -282,12 +283,13 @@
             custom_formatters_kwargs=custom_formatters_kwargs,
             use_pendulum=use_pendulum,
             http_query_parameters=http_query_parameters,
-
+            treat_dot_as_module=treat_dot_as_module,
             use_exact_imports=use_exact_imports,
             default_field_extras=default_field_extras,
             target_datetime_class=target_datetime_class,
             keyword_only=keyword_only,
             no_alias=no_alias,
+            formatters=formatters,
         )
         self.open_api_scopes: list[OpenAPIScope] = openapi_scopes or [OpenAPIScope.Schemas]

@@ -486,6 +488,7 @@
                 custom_base_class=self.base_class,
                 custom_template_dir=self.custom_template_dir,
                 keyword_only=self.keyword_only,
+                treat_dot_as_module=self.treat_dot_as_module,
             )
         )

datamodel_code_generator/reference.py

@@ -317,6 +317,7 @@ class ModelResolver: # noqa: PLR0904
         remove_special_field_name_prefix: bool = False,  # noqa: FBT001, FBT002
         capitalise_enum_members: bool = False,  # noqa: FBT001, FBT002
         no_alias: bool = False,  # noqa: FBT001, FBT002
+        remove_suffix_number: bool = False,  # noqa: FBT001, FBT002
     ) -> None:
         self.references: dict[str, Reference] = {}
         self._current_root: Sequence[str] = []

@@ -349,6 +350,7 @@
         self.class_name_generator = custom_class_name_generator or self.default_class_name_generator
         self._base_path: Path = base_path or Path.cwd()
         self._current_base_path: Path | None = self._base_path
+        self.remove_suffix_number: bool = remove_suffix_number

     @property
     def current_base_path(self) -> Path | None:

@@ -615,7 +617,7 @@

     def _get_unique_name(self, name: str, camel: bool = False) -> str:  # noqa: FBT001, FBT002
         unique_name: str = name
-        count: int = 1
+        count: int = 0 if self.remove_suffix_number else 1
         reference_names = {r.name for r in self.references.values()} | self.exclude_names
         while unique_name in reference_names:
             if self.duplicate_name_suffix:

@@ -627,7 +629,7 @@
             else:
                 name_parts = [name, count]
             delimiter = "" if camel else "_"
-            unique_name = delimiter.join(str(p) for p in name_parts if p)
+            unique_name = delimiter.join(str(p) for p in name_parts if p) if count else name
             count += 1
         return unique_name

datamodel_code_generator/types.py

@@ -167,26 +167,67 @@ def _remove_none_from_type(type_: str, split_pattern: Pattern[str], delimiter: s
     return types


-def _remove_none_from_union(type_: str, use_union_operator: bool) -> str:  # noqa:
+def _remove_none_from_union(type_: str, *, use_union_operator: bool) -> str:  # noqa: PLR0911, PLR0912
     if use_union_operator:
-        if
+        if " | " not in type_:
             return type_
-        return UNION_OPERATOR_DELIMITER.join(
-            _remove_none_from_type(type_, UNION_OPERATOR_PATTERN, UNION_OPERATOR_DELIMITER)
-        )

+        # Process each part of the union
+        parts = UNION_OPERATOR_PATTERN.split(type_)
+        processed_parts = []
+        for part in parts:
+            if part == NONE:
+                continue
+
+            # Check if this part contains a nested union
+            processed_part = _remove_none_from_union(part, use_union_operator=True) if " | " in part else part
+            processed_parts.append(processed_part)
+
+        if not processed_parts:
+            return NONE
+
+        return UNION_OPERATOR_DELIMITER.join(processed_parts)
     if not type_.startswith(UNION_PREFIX):
         return type_
-    inner_types = _remove_none_from_type(type_[len(UNION_PREFIX) :][:-1], UNION_PATTERN, UNION_DELIMITER)

-
-
-
+    inner_text = type_[len(UNION_PREFIX) : -1]
+    parts = []
+    inner_count = 0
+    current_part = ""
+
+    # Parse union parts carefully to handle nested structures
+    for char in inner_text:
+        current_part += char
+        if char == "[":
+            inner_count += 1
+        elif char == "]":
+            inner_count -= 1
+        elif char == "," and inner_count == 0:
+            part = current_part[:-1].strip()
+            if part != NONE:
+                # Process nested unions recursively
+                if part.startswith(UNION_PREFIX):
+                    part = _remove_none_from_union(part, use_union_operator=False)
+                parts.append(part)
+            current_part = ""
+
+    part = current_part.strip()
+    if current_part and part != NONE:
+        if part.startswith(UNION_PREFIX):
+            part = _remove_none_from_union(part, use_union_operator=False)
+        parts.append(part)
+
+    if not parts:
+        return NONE
+    if len(parts) == 1:
+        return parts[0]
+
+    return f"{UNION_PREFIX}{UNION_DELIMITER.join(parts)}]"


 @lru_cache
 def get_optional_type(type_: str, use_union_operator: bool) -> str:  # noqa: FBT001
-    type_ = _remove_none_from_union(type_, use_union_operator)
+    type_ = _remove_none_from_union(type_, use_union_operator=use_union_operator)

     if not type_ or type_ == NONE:
         return NONE

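The rewritten _remove_none_from_union() is exercised through the public get_optional_type() helper in the same module. The expected outputs below are traced by hand from the new parsing loop rather than quoted from the package's tests:

from datamodel_code_generator.types import get_optional_type

get_optional_type("Union[str, None]", False)              # 'Optional[str]'
get_optional_type("Union[str, Union[int, None]]", False)  # 'Optional[Union[str, int]]' (nested None removed)
get_optional_type("str | None", True)                     # 'str | None'
get_optional_type("dict[str, int] | None", True)          # 'dict[str, int] | None' (comma inside brackets untouched)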
@@ -249,6 +290,7 @@ class DataType(_BaseModel):
     children: list[Any] = []  # noqa: RUF012
     strict: bool = False
     dict_key: Optional[DataType] = None  # noqa: UP045
+    treat_dot_as_module: bool = False

     _exclude_fields: ClassVar[set[str]] = {"parent", "children"}
     _pass_fields: ClassVar[set[str]] = {"parent", "children", "data_types", "reference"}

@@ -402,7 +444,9 @@
                 self.is_optional = True
                 continue

-            non_optional_data_type_type = _remove_none_from_union(
+            non_optional_data_type_type = _remove_none_from_union(
+                data_type_type, use_union_operator=self.use_union_operator
+            )

             if non_optional_data_type_type != data_type_type:
                 self.is_optional = True

@@ -526,6 +570,7 @@ class DataTypeManager(ABC):
         use_union_operator: bool = False,  # noqa: FBT001, FBT002
         use_pendulum: bool = False,  # noqa: FBT001, FBT002
         target_datetime_class: DatetimeClassType | None = None,
+        treat_dot_as_module: bool = False,  # noqa: FBT001, FBT002
     ) -> None:
         self.python_version = python_version
         self.use_standard_collections: bool = use_standard_collections

@@ -537,6 +582,7 @@
         self.use_union_operator: bool = use_union_operator
         self.use_pendulum: bool = use_pendulum
         self.target_datetime_class: DatetimeClassType = target_datetime_class or DatetimeClassType.Datetime
+        self.treat_dot_as_module: bool = treat_dot_as_module

         if TYPE_CHECKING:
             self.data_type: type[DataType]

@@ -547,6 +593,7 @@
             use_standard_collections=(bool, use_standard_collections),
             use_generic_container=(bool, use_generic_container_types),
             use_union_operator=(bool, use_union_operator),
+            treat_dot_as_module=(bool, treat_dot_as_module),
             __base__=DataType,
         )

{datamodel_code_generator-0.28.3.dist-info → datamodel_code_generator-0.28.5.dist-info}/METADATA
RENAMED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: datamodel-code-generator
-Version: 0.28.3
+Version: 0.28.5
 Summary: Datamodel Code Generator
 Project-URL: Homepage, https://github.com/koxudaxi/datamodel-code-generator
 Project-URL: Source, https://github.com/koxudaxi/datamodel-code-generator

@@ -34,12 +34,15 @@ Requires-Dist: httpx>=0.24.1; extra == 'all'
 Requires-Dist: openapi-spec-validator<0.7,>=0.2.8; extra == 'all'
 Requires-Dist: prance>=0.18.2; extra == 'all'
 Requires-Dist: pysnooper<2,>=0.4.1; extra == 'all'
+Requires-Dist: ruff>=0.9.10; extra == 'all'
 Provides-Extra: debug
 Requires-Dist: pysnooper<2,>=0.4.1; extra == 'debug'
 Provides-Extra: graphql
 Requires-Dist: graphql-core>=3.2.3; extra == 'graphql'
 Provides-Extra: http
 Requires-Dist: httpx>=0.24.1; extra == 'http'
+Provides-Extra: ruff
+Requires-Dist: ruff>=0.9.10; extra == 'ruff'
 Provides-Extra: validation
 Requires-Dist: openapi-spec-validator<0.7,>=0.2.8; extra == 'validation'
 Requires-Dist: prance>=0.18.2; extra == 'validation'

@@ -403,6 +406,8 @@ Options:
                         "datetime.date,datetime.datetime"
   --custom-formatters CUSTOM_FORMATTERS
                         List of modules with custom formatter (delimited list input).
+  --formatters {black,isort,ruff-check,ruff-format} [{black,isort,ruff-check,ruff-format} ...]
+                        Formatters for output (default: [black, isort])
   --http-headers HTTP_HEADER [HTTP_HEADER ...]
                         Set headers in HTTP requests to the remote host. (example:
                         "Authorization: Basic dXNlcjpwYXNz")
{datamodel_code_generator-0.28.3.dist-info → datamodel_code_generator-0.28.5.dist-info}/RECORD
RENAMED

@@ -1,33 +1,33 @@
-datamodel_code_generator/__init__.py,sha256=
-datamodel_code_generator/__main__.py,sha256=
-datamodel_code_generator/arguments.py,sha256=
-datamodel_code_generator/format.py,sha256=
+datamodel_code_generator/__init__.py,sha256=sguYwixd5tfTNpa0TwfKFiYqEgwunUaVY2Aea7mCavk,20327
+datamodel_code_generator/__main__.py,sha256=LMD988WjkqmzWjHXoNiiEI3KFnrq3kupbyiObDf1HpA,22137
+datamodel_code_generator/arguments.py,sha256=LQyCC7tsDdy7ie7nbQVVW_79usVumX8O5pd99ZZ51ds,16466
+datamodel_code_generator/format.py,sha256=zvX0KH1uWwGnTYoVM4KhAuKZn5erjkH5eyi4t3leirw,8962
 datamodel_code_generator/http.py,sha256=LE94GC7I9D8lWIg_YAGWedfy0XNxOXTmiYKuNMTwouo,887
 datamodel_code_generator/imports.py,sha256=Nq83WbEGCegntg3WX4VbKfzAIs84alZ7IrYyNPrlUbc,5517
 datamodel_code_generator/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 datamodel_code_generator/pydantic_patch.py,sha256=co1IUDvZqQ-xEZ3C9gbV-BVm2Cin1vfyZNr2Dr0LdHY,718
-datamodel_code_generator/reference.py,sha256=
-datamodel_code_generator/types.py,sha256=
+datamodel_code_generator/reference.py,sha256=cnD6gGevRomez1dr16-KBz-GgDN8gCMQZqpwmb16CwA,26021
+datamodel_code_generator/types.py,sha256=fobUZnNTOGpzF4qZMraoLogVkAU7zBdFVG-8SOFoDD4,21163
 datamodel_code_generator/util.py,sha256=mZW8-6CbFe6T4IY5OM9Av6cH-0VknQGe2eIKjTM6Jzo,2729
 datamodel_code_generator/model/__init__.py,sha256=pJlJ1juQ-Gv17ZKXy6OAfJSSoOAmYQ7QCbdneu1BENU,3594
-datamodel_code_generator/model/base.py,sha256=
-datamodel_code_generator/model/dataclass.py,sha256=
-datamodel_code_generator/model/enum.py,sha256=
+datamodel_code_generator/model/base.py,sha256=Ma28Vx1p5zsFt23BMCs1UNDXuBq_USEhuPUvaeFLkS4,14959
+datamodel_code_generator/model/dataclass.py,sha256=t4NtgVhopTtEDUJCqijJheVlewSt8IJzQhQb8gXspfs,6252
+datamodel_code_generator/model/enum.py,sha256=yriQslY1hag_Qk-Xv3vl_LkPnbmMZ3iRTAGiiyMN0Io,4003
 datamodel_code_generator/model/imports.py,sha256=PTc09UzIBSsa5yAPoieb6hCGIohU2T1Y7igNy_pYarg,820
-datamodel_code_generator/model/msgspec.py,sha256=
+datamodel_code_generator/model/msgspec.py,sha256=qL2DIEwBfpn-vd8p8KEmUViMUce6RgI4Ql-drOmPR7M,11845
 datamodel_code_generator/model/rootmodel.py,sha256=pY8G2SPjkafzfJ1L9P5sNdp8qe45UclpUYN86guRB3M,202
-datamodel_code_generator/model/scalar.py,sha256=
-datamodel_code_generator/model/typed_dict.py,sha256=
-datamodel_code_generator/model/types.py,sha256=
-datamodel_code_generator/model/union.py,sha256=
+datamodel_code_generator/model/scalar.py,sha256=xfONEK30eYJ2mSL9PK9zXqEG5-xApYMI_gmKOn5qhK4,2664
+datamodel_code_generator/model/typed_dict.py,sha256=FJi_fEZWuFe3nvidfl-jqr9PMRFTvfusoEFQkx1BqHI,4685
+datamodel_code_generator/model/types.py,sha256=ZyEwi76EBI5RS5JfoNUoRHmjOAZDup8oNFvQQDizfwQ,3502
+datamodel_code_generator/model/union.py,sha256=zwq1ayGFW3KbI4SxPCcdZcrM7X4Px25IdujDedtwgOw,1929
 datamodel_code_generator/model/pydantic/__init__.py,sha256=CtyzSriGEYGp1yfHapjD5lrS2vkSNe8AqKSYO-XaRWc,1095
-datamodel_code_generator/model/pydantic/base_model.py,sha256=
+datamodel_code_generator/model/pydantic/base_model.py,sha256=qlGTxWYPS3XsHYwrRKjJDHvCixAxpWMEXTPtyAVXb6g,12182
 datamodel_code_generator/model/pydantic/custom_root_type.py,sha256=VJpEAmGFe3TzMKrR5YvR7PJ3pfGHcYytO1zhQrWyoWg,299
 datamodel_code_generator/model/pydantic/dataclass.py,sha256=jgjkqQk71CQP4RbTcPGSEOQDNqjTQnzFavvl5LjWTBw,455
 datamodel_code_generator/model/pydantic/imports.py,sha256=nWPiLgDeYNPHcAs8M-gaUUZg1daQRHdBPpjYuX3b5u4,2225
-datamodel_code_generator/model/pydantic/types.py,sha256=
+datamodel_code_generator/model/pydantic/types.py,sha256=ttTiDsQ6FV3h4C_NTEhvPUmUpeqxBNQt-DJJFpKZS8s,13356
 datamodel_code_generator/model/pydantic_v2/__init__.py,sha256=xsfYcIUA2S8XzPIsYQSzDuBYZ1XRicfhGLHlQBlZwsg,1226
-datamodel_code_generator/model/pydantic_v2/base_model.py,sha256=
+datamodel_code_generator/model/pydantic_v2/base_model.py,sha256=J_DxR6Auw0L-zHn0F5l9K8XtSmfEvDT26Bj-VZxihiE,8353
 datamodel_code_generator/model/pydantic_v2/imports.py,sha256=K3XD2kF9YCKmo5_7b2ipV5bGUrjz0avS-SiyDMVIpF0,299
 datamodel_code_generator/model/pydantic_v2/root_model.py,sha256=H4rwtg56N65-I3QHlPvlNhDcSPV0m56KSAgfGmxYXAQ,888
 datamodel_code_generator/model/pydantic_v2/types.py,sha256=apEuIhCBa15WdwGA0R9cqjpiH8mTLgAgu02CjcU4Css,2064

@@ -48,12 +48,12 @@ datamodel_code_generator/model/template/pydantic_v2/BaseModel.jinja2,sha256=XdSC
 datamodel_code_generator/model/template/pydantic_v2/ConfigDict.jinja2,sha256=xHvBYrh__32O1xRCSl6_u5zbyYIjB8a5k8fZiTo0spY,149
 datamodel_code_generator/model/template/pydantic_v2/RootModel.jinja2,sha256=XQBlML7Hm5hN6_AExENNvVc_yxNWijcIfTTbbmegCpE,1223
 datamodel_code_generator/parser/__init__.py,sha256=3XtFcDPocaetfjmWFqj_CubqNCDipb7vXZHsYKdJXXU,851
-datamodel_code_generator/parser/base.py,sha256=
-datamodel_code_generator/parser/graphql.py,sha256=
-datamodel_code_generator/parser/jsonschema.py,sha256=
-datamodel_code_generator/parser/openapi.py,sha256=
-datamodel_code_generator-0.28.
-datamodel_code_generator-0.28.
-datamodel_code_generator-0.28.
-datamodel_code_generator-0.28.
-datamodel_code_generator-0.28.
+datamodel_code_generator/parser/base.py,sha256=COceRiOHtaOOS0gvDvKLNboxD4KY0ofJdmSdoyPBfNw,60618
+datamodel_code_generator/parser/graphql.py,sha256=ODbkMp42fYh8kH81KNeAffcFTmJb6_hgMyCsLUmdpMo,22585
+datamodel_code_generator/parser/jsonschema.py,sha256=J5Ec5Vf4LGjCRWPraBmpTkepq-tN0ri5MKB9bUSDdFc,69912
+datamodel_code_generator/parser/openapi.py,sha256=MfHSVfwq7qx_YEOiNel-ABbwISdH-kp87Q09WeNr_5w,27180
+datamodel_code_generator-0.28.5.dist-info/METADATA,sha256=UVX-nQFVHIBUGD1dqfDNsZ4qw4eOyiOsQI8MzkGrKUs,25189
+datamodel_code_generator-0.28.5.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+datamodel_code_generator-0.28.5.dist-info/entry_points.txt,sha256=cJVcHiEViQMANaoM5C1xR5hzmyCqH6hHHMpV8W00in8,77
+datamodel_code_generator-0.28.5.dist-info/licenses/LICENSE,sha256=K54Lwc6_jduycsy8oFFjQEeSSuEiqvVIjCGIXOMnuTQ,1068
+datamodel_code_generator-0.28.5.dist-info/RECORD,,
{datamodel_code_generator-0.28.3.dist-info → datamodel_code_generator-0.28.5.dist-info}/WHEEL
RENAMED
File without changes

{datamodel_code_generator-0.28.3.dist-info → datamodel_code_generator-0.28.5.dist-info}/entry_points.txt
RENAMED
File without changes

{datamodel_code_generator-0.28.3.dist-info → datamodel_code_generator-0.28.5.dist-info}/licenses/LICENSE
RENAMED
File without changes