datamodel-code-generator 0.27.2__py3-none-any.whl → 0.28.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of datamodel-code-generator might be problematic; see the release details for more information.

Files changed (43)
  1. datamodel_code_generator/__init__.py +168 -196
  2. datamodel_code_generator/__main__.py +146 -189
  3. datamodel_code_generator/arguments.py +227 -230
  4. datamodel_code_generator/format.py +77 -129
  5. datamodel_code_generator/http.py +12 -10
  6. datamodel_code_generator/imports.py +59 -65
  7. datamodel_code_generator/model/__init__.py +28 -31
  8. datamodel_code_generator/model/base.py +100 -144
  9. datamodel_code_generator/model/dataclass.py +62 -70
  10. datamodel_code_generator/model/enum.py +34 -30
  11. datamodel_code_generator/model/imports.py +13 -11
  12. datamodel_code_generator/model/msgspec.py +116 -138
  13. datamodel_code_generator/model/pydantic/__init__.py +18 -28
  14. datamodel_code_generator/model/pydantic/base_model.py +121 -140
  15. datamodel_code_generator/model/pydantic/custom_root_type.py +2 -2
  16. datamodel_code_generator/model/pydantic/dataclass.py +6 -4
  17. datamodel_code_generator/model/pydantic/imports.py +35 -33
  18. datamodel_code_generator/model/pydantic/types.py +91 -119
  19. datamodel_code_generator/model/pydantic_v2/__init__.py +21 -18
  20. datamodel_code_generator/model/pydantic_v2/base_model.py +118 -127
  21. datamodel_code_generator/model/pydantic_v2/imports.py +5 -3
  22. datamodel_code_generator/model/pydantic_v2/root_model.py +6 -6
  23. datamodel_code_generator/model/pydantic_v2/types.py +11 -7
  24. datamodel_code_generator/model/rootmodel.py +1 -1
  25. datamodel_code_generator/model/scalar.py +33 -32
  26. datamodel_code_generator/model/typed_dict.py +41 -51
  27. datamodel_code_generator/model/types.py +24 -19
  28. datamodel_code_generator/model/union.py +21 -17
  29. datamodel_code_generator/parser/__init__.py +16 -12
  30. datamodel_code_generator/parser/base.py +327 -515
  31. datamodel_code_generator/parser/graphql.py +87 -119
  32. datamodel_code_generator/parser/jsonschema.py +438 -607
  33. datamodel_code_generator/parser/openapi.py +180 -220
  34. datamodel_code_generator/pydantic_patch.py +8 -9
  35. datamodel_code_generator/reference.py +199 -297
  36. datamodel_code_generator/types.py +149 -215
  37. datamodel_code_generator/util.py +23 -36
  38. {datamodel_code_generator-0.27.2.dist-info → datamodel_code_generator-0.28.0.dist-info}/METADATA +10 -5
  39. datamodel_code_generator-0.28.0.dist-info/RECORD +59 -0
  40. datamodel_code_generator-0.27.2.dist-info/RECORD +0 -59
  41. {datamodel_code_generator-0.27.2.dist-info → datamodel_code_generator-0.28.0.dist-info}/WHEEL +0 -0
  42. {datamodel_code_generator-0.27.2.dist-info → datamodel_code_generator-0.28.0.dist-info}/entry_points.txt +0 -0
  43. {datamodel_code_generator-0.27.2.dist-info → datamodel_code_generator-0.28.0.dist-info}/licenses/LICENSE +0 -0
@@ -1,22 +1,11 @@
1
+ from __future__ import annotations
2
+
1
3
  from functools import wraps
2
- from pathlib import Path
3
- from typing import (
4
- Any,
5
- ClassVar,
6
- DefaultDict,
7
- Dict,
8
- List,
9
- Optional,
10
- Sequence,
11
- Set,
12
- Tuple,
13
- Type,
14
- TypeVar,
15
- )
4
+ from typing import TYPE_CHECKING, Any, ClassVar, Optional, TypeVar
16
5
 
17
6
  from pydantic import Field
18
7
 
19
- from datamodel_code_generator import DatetimeClassType, PythonVersion
8
+ from datamodel_code_generator import DatetimeClassType, PythonVersion, PythonVersionMin
20
9
  from datamodel_code_generator.imports import (
21
10
  IMPORT_DATE,
22
11
  IMPORT_DATETIME,
@@ -38,7 +27,6 @@ from datamodel_code_generator.model.pydantic.base_model import (
38
27
  from datamodel_code_generator.model.rootmodel import RootModel as _RootModel
39
28
  from datamodel_code_generator.model.types import DataTypeManager as _DataTypeManager
40
29
  from datamodel_code_generator.model.types import type_map_factory
41
- from datamodel_code_generator.reference import Reference
42
30
  from datamodel_code_generator.types import (
43
31
  DataType,
44
32
  StrictTypes,
@@ -47,36 +35,40 @@ from datamodel_code_generator.types import (
47
35
  get_optional_type,
48
36
  )
49
37
 
38
+ if TYPE_CHECKING:
39
+ from collections import defaultdict
40
+ from collections.abc import Sequence
41
+ from pathlib import Path
42
+
43
+ from datamodel_code_generator.reference import Reference
44
+
50
45
 
51
46
  def _has_field_assignment(field: DataModelFieldBase) -> bool:
52
- return not (
53
- field.required
54
- or (field.represented_default == 'None' and field.strip_default_none)
55
- )
47
+ return not (field.required or (field.represented_default == "None" and field.strip_default_none))
56
48
 
57
49
 
58
- DataModelFieldBaseT = TypeVar('DataModelFieldBaseT', bound=DataModelFieldBase)
50
+ DataModelFieldBaseT = TypeVar("DataModelFieldBaseT", bound=DataModelFieldBase)
59
51
 
60
52
 
61
- def import_extender(cls: Type[DataModelFieldBaseT]) -> Type[DataModelFieldBaseT]:
62
- original_imports: property = getattr(cls, 'imports', None) # type: ignore
53
+ def import_extender(cls: type[DataModelFieldBaseT]) -> type[DataModelFieldBaseT]:
54
+ original_imports: property = cls.imports
63
55
 
64
- @wraps(original_imports.fget) # type: ignore
65
- def new_imports(self: DataModelFieldBaseT) -> Tuple[Import, ...]:
56
+ @wraps(original_imports.fget) # pyright: ignore[reportArgumentType]
57
+ def new_imports(self: DataModelFieldBaseT) -> tuple[Import, ...]:
66
58
  extra_imports = []
67
59
  field = self.field
68
60
  # TODO: Improve field detection
69
- if field and field.startswith('field('):
61
+ if field and field.startswith("field("):
70
62
  extra_imports.append(IMPORT_MSGSPEC_FIELD)
71
- if self.field and 'lambda: convert' in self.field:
63
+ if self.field and "lambda: convert" in self.field:
72
64
  extra_imports.append(IMPORT_MSGSPEC_CONVERT)
73
65
  if self.annotated:
74
66
  extra_imports.append(IMPORT_MSGSPEC_META)
75
- if self.extras.get('is_classvar'):
67
+ if self.extras.get("is_classvar"):
76
68
  extra_imports.append(IMPORT_CLASSVAR)
77
- return chain_as_tuple(original_imports.fget(self), extra_imports) # type: ignore
69
+ return chain_as_tuple(original_imports.fget(self), extra_imports) # pyright: ignore[reportOptionalCall]
78
70
 
79
- setattr(cls, 'imports', property(new_imports))
71
+ cls.imports = property(new_imports) # pyright: ignore[reportAttributeAccessIssue]
80
72
  return cls
81
73
 
82
74
 
@@ -85,30 +77,30 @@ class RootModel(_RootModel):
85
77
 
86
78
 
87
79
  class Struct(DataModel):
88
- TEMPLATE_FILE_PATH: ClassVar[str] = 'msgspec.jinja2'
89
- BASE_CLASS: ClassVar[str] = 'msgspec.Struct'
90
- DEFAULT_IMPORTS: ClassVar[Tuple[Import, ...]] = ()
80
+ TEMPLATE_FILE_PATH: ClassVar[str] = "msgspec.jinja2"
81
+ BASE_CLASS: ClassVar[str] = "msgspec.Struct"
82
+ DEFAULT_IMPORTS: ClassVar[tuple[Import, ...]] = ()
91
83
 
92
- def __init__(
84
+ def __init__( # noqa: PLR0913
93
85
  self,
94
86
  *,
95
87
  reference: Reference,
96
- fields: List[DataModelFieldBase],
97
- decorators: Optional[List[str]] = None,
98
- base_classes: Optional[List[Reference]] = None,
99
- custom_base_class: Optional[str] = None,
100
- custom_template_dir: Optional[Path] = None,
101
- extra_template_data: Optional[DefaultDict[str, Dict[str, Any]]] = None,
102
- methods: Optional[List[str]] = None,
103
- path: Optional[Path] = None,
104
- description: Optional[str] = None,
88
+ fields: list[DataModelFieldBase],
89
+ decorators: list[str] | None = None,
90
+ base_classes: list[Reference] | None = None,
91
+ custom_base_class: str | None = None,
92
+ custom_template_dir: Path | None = None,
93
+ extra_template_data: defaultdict[str, dict[str, Any]] | None = None,
94
+ methods: list[str] | None = None,
95
+ path: Path | None = None,
96
+ description: str | None = None,
105
97
  default: Any = UNDEFINED,
106
98
  nullable: bool = False,
107
99
  keyword_only: bool = False,
108
100
  ) -> None:
109
101
  super().__init__(
110
102
  reference=reference,
111
- fields=sorted(fields, key=_has_field_assignment, reverse=False),
103
+ fields=sorted(fields, key=_has_field_assignment),
112
104
  decorators=decorators,
113
105
  base_classes=base_classes,
114
106
  custom_base_class=custom_base_class,
@@ -121,45 +113,45 @@ class Struct(DataModel):
121
113
  nullable=nullable,
122
114
  keyword_only=keyword_only,
123
115
  )
124
- self.extra_template_data.setdefault('base_class_kwargs', {})
116
+ self.extra_template_data.setdefault("base_class_kwargs", {})
125
117
  if self.keyword_only:
126
- self.add_base_class_kwarg('kw_only', 'True')
118
+ self.add_base_class_kwarg("kw_only", "True")
127
119
 
128
- def add_base_class_kwarg(self, name: str, value):
129
- self.extra_template_data['base_class_kwargs'][name] = value
120
+ def add_base_class_kwarg(self, name: str, value: str) -> None:
121
+ self.extra_template_data["base_class_kwargs"][name] = value
130
122
 
131
123
 
132
124
  class Constraints(_Constraints):
133
125
  # To override existing pattern alias
134
- regex: Optional[str] = Field(None, alias='regex')
135
- pattern: Optional[str] = Field(None, alias='pattern')
126
+ regex: Optional[str] = Field(None, alias="regex") # noqa: UP045
127
+ pattern: Optional[str] = Field(None, alias="pattern") # noqa: UP045
136
128
 
137
129
 
138
130
  @import_extender
139
131
  class DataModelField(DataModelFieldBase):
140
- _FIELD_KEYS: ClassVar[Set[str]] = {
141
- 'default',
142
- 'default_factory',
132
+ _FIELD_KEYS: ClassVar[set[str]] = {
133
+ "default",
134
+ "default_factory",
143
135
  }
144
- _META_FIELD_KEYS: ClassVar[Set[str]] = {
145
- 'title',
146
- 'description',
147
- 'gt',
148
- 'ge',
149
- 'lt',
150
- 'le',
151
- 'multiple_of',
136
+ _META_FIELD_KEYS: ClassVar[set[str]] = {
137
+ "title",
138
+ "description",
139
+ "gt",
140
+ "ge",
141
+ "lt",
142
+ "le",
143
+ "multiple_of",
152
144
  # 'min_items', # not supported by msgspec
153
145
  # 'max_items', # not supported by msgspec
154
- 'min_length',
155
- 'max_length',
156
- 'pattern',
157
- 'examples',
146
+ "min_length",
147
+ "max_length",
148
+ "pattern",
149
+ "examples",
158
150
  # 'unique_items', # not supported by msgspec
159
151
  }
160
- _PARSE_METHOD = 'convert'
161
- _COMPARE_EXPRESSIONS: ClassVar[Set[str]] = {'gt', 'ge', 'lt', 'le', 'multiple_of'}
162
- constraints: Optional[Constraints] = None
152
+ _PARSE_METHOD = "convert"
153
+ _COMPARE_EXPRESSIONS: ClassVar[set[str]] = {"gt", "ge", "lt", "le", "multiple_of"}
154
+ constraints: Optional[Constraints] = None # noqa: UP045
163
155
 
164
156
  def self_reference(self) -> bool: # pragma: no cover
165
157
  return isinstance(self.parent, Struct) and self.parent.reference.path in {
@@ -167,87 +159,72 @@ class DataModelField(DataModelFieldBase):
167
159
  }
168
160
 
169
161
  def process_const(self) -> None:
170
- if 'const' not in self.extras:
171
- return None
162
+ if "const" not in self.extras:
163
+ return
172
164
  self.const = True
173
165
  self.nullable = False
174
- const = self.extras['const']
175
- if self.data_type.type == 'str' and isinstance(
176
- const, str
177
- ): # pragma: no cover # Literal supports only str
166
+ const = self.extras["const"]
167
+ if self.data_type.type == "str" and isinstance(const, str): # pragma: no cover # Literal supports only str
178
168
  self.data_type = self.data_type.__class__(literals=[const])
179
169
 
180
170
  def _get_strict_field_constraint_value(self, constraint: str, value: Any) -> Any:
181
171
  if value is None or constraint not in self._COMPARE_EXPRESSIONS:
182
172
  return value
183
173
 
184
- if any(
185
- data_type.type == 'float' for data_type in self.data_type.all_data_types
186
- ):
174
+ if any(data_type.type == "float" for data_type in self.data_type.all_data_types):
187
175
  return float(value)
188
176
  return int(value)
189
177
 
190
178
  @property
191
- def field(self) -> Optional[str]:
179
+ def field(self) -> str | None:
192
180
  """for backwards compatibility"""
193
181
  result = str(self)
194
- if result == '':
182
+ if not result:
195
183
  return None
196
-
197
184
  return result
198
185
 
199
186
  def __str__(self) -> str:
200
- data: Dict[str, Any] = {
201
- k: v for k, v in self.extras.items() if k in self._FIELD_KEYS
202
- }
187
+ data: dict[str, Any] = {k: v for k, v in self.extras.items() if k in self._FIELD_KEYS}
203
188
  if self.alias:
204
- data['name'] = self.alias
189
+ data["name"] = self.alias
205
190
 
206
191
  if self.default != UNDEFINED and self.default is not None:
207
- data['default'] = self.default
192
+ data["default"] = self.default
208
193
  elif not self.required:
209
- data['default'] = None
194
+ data["default"] = None
210
195
 
211
196
  if self.required:
212
197
  data = {
213
198
  k: v
214
199
  for k, v in data.items()
215
200
  if k
216
- not in (
217
- 'default',
218
- 'default_factory',
219
- )
201
+ not in {
202
+ "default",
203
+ "default_factory",
204
+ }
220
205
  }
221
- elif self.default and 'default_factory' not in data:
206
+ elif self.default and "default_factory" not in data:
222
207
  default_factory = self._get_default_as_struct_model()
223
208
  if default_factory is not None:
224
- data.pop('default')
225
- data['default_factory'] = default_factory
209
+ data.pop("default")
210
+ data["default_factory"] = default_factory
226
211
 
227
212
  if not data:
228
- return ''
213
+ return ""
229
214
 
230
- if len(data) == 1 and 'default' in data:
231
- return repr(data['default'])
215
+ if len(data) == 1 and "default" in data:
216
+ return repr(data["default"])
232
217
 
233
- kwargs = [
234
- f'{k}={v if k == "default_factory" else repr(v)}' for k, v in data.items()
235
- ]
236
- return f'field({", ".join(kwargs)})'
218
+ kwargs = [f"{k}={v if k == 'default_factory' else repr(v)}" for k, v in data.items()]
219
+ return f"field({', '.join(kwargs)})"
237
220
 
238
221
  @property
239
- def annotated(self) -> Optional[str]:
222
+ def annotated(self) -> str | None:
240
223
  if not self.use_annotated: # pragma: no cover
241
224
  return None
242
225
 
243
- data: Dict[str, Any] = {
244
- k: v for k, v in self.extras.items() if k in self._META_FIELD_KEYS
245
- }
246
- if (
247
- self.constraints is not None
248
- and not self.self_reference()
249
- and not self.data_type.strict
250
- ):
226
+ data: dict[str, Any] = {k: v for k, v in self.extras.items() if k in self._META_FIELD_KEYS}
227
+ if self.constraints is not None and not self.self_reference() and not self.data_type.strict:
251
228
  data = {
252
229
  **data,
253
230
  **{
@@ -257,59 +234,60 @@ class DataModelField(DataModelFieldBase):
257
234
  },
258
235
  }
259
236
 
260
- meta_arguments = sorted(
261
- f'{k}={repr(v)}' for k, v in data.items() if v is not None
262
- )
237
+ meta_arguments = sorted(f"{k}={v!r}" for k, v in data.items() if v is not None)
263
238
  if not meta_arguments:
264
239
  return None
265
240
 
266
- meta = f'Meta({", ".join(meta_arguments)})'
241
+ meta = f"Meta({', '.join(meta_arguments)})"
267
242
 
268
- if not self.required and not self.extras.get('is_classvar'):
243
+ if not self.required and not self.extras.get("is_classvar"):
269
244
  type_hint = self.data_type.type_hint
270
- annotated_type = f'Annotated[{type_hint}, {meta}]'
245
+ annotated_type = f"Annotated[{type_hint}, {meta}]"
271
246
  return get_optional_type(annotated_type, self.data_type.use_union_operator)
272
247
 
273
- annotated_type = f'Annotated[{self.type_hint}, {meta}]'
274
- if self.extras.get('is_classvar'):
275
- annotated_type = f'ClassVar[{annotated_type}]'
248
+ annotated_type = f"Annotated[{self.type_hint}, {meta}]"
249
+ if self.extras.get("is_classvar"):
250
+ annotated_type = f"ClassVar[{annotated_type}]"
276
251
 
277
252
  return annotated_type
278
253
 
279
- def _get_default_as_struct_model(self) -> Optional[str]:
254
+ def _get_default_as_struct_model(self) -> str | None:
280
255
  for data_type in self.data_type.data_types or (self.data_type,):
281
256
  # TODO: Check nested data_types
282
257
  if data_type.is_dict or self.data_type.is_union:
283
258
  # TODO: Parse Union and dict model for default
284
259
  continue # pragma: no cover
285
- elif data_type.is_list and len(data_type.data_types) == 1:
286
- data_type = data_type.data_types[0]
260
+ if data_type.is_list and len(data_type.data_types) == 1:
261
+ data_type_child = data_type.data_types[0]
287
262
  if ( # pragma: no cover
288
- data_type.reference
289
- and (
290
- isinstance(data_type.reference.source, Struct)
291
- or isinstance(data_type.reference.source, RootModel)
292
- )
263
+ data_type_child.reference
264
+ and (isinstance(data_type_child.reference.source, (Struct, RootModel)))
293
265
  and isinstance(self.default, list)
294
266
  ):
295
- return f'lambda: {self._PARSE_METHOD}({repr(self.default)}, type=list[{data_type.alias or data_type.reference.source.class_name}])'
267
+ return (
268
+ f"lambda: {self._PARSE_METHOD}({self.default!r}, "
269
+ f"type=list[{data_type_child.alias or data_type_child.reference.source.class_name}])"
270
+ )
296
271
  elif data_type.reference and isinstance(data_type.reference.source, Struct):
297
- return f'lambda: {self._PARSE_METHOD}({repr(self.default)}, type={data_type.alias or data_type.reference.source.class_name})'
272
+ return (
273
+ f"lambda: {self._PARSE_METHOD}({self.default!r}, "
274
+ f"type={data_type.alias or data_type.reference.source.class_name})"
275
+ )
298
276
  return None
299
277
 
300
278
 
301
279
  class DataTypeManager(_DataTypeManager):
302
- def __init__(
280
+ def __init__( # noqa: PLR0913, PLR0917
303
281
  self,
304
- python_version: PythonVersion = PythonVersion.PY_38,
305
- use_standard_collections: bool = False,
306
- use_generic_container_types: bool = False,
307
- strict_types: Optional[Sequence[StrictTypes]] = None,
308
- use_non_positive_negative_number_constrained_types: bool = False,
309
- use_union_operator: bool = False,
310
- use_pendulum: bool = False,
282
+ python_version: PythonVersion = PythonVersionMin,
283
+ use_standard_collections: bool = False, # noqa: FBT001, FBT002
284
+ use_generic_container_types: bool = False, # noqa: FBT001, FBT002
285
+ strict_types: Sequence[StrictTypes] | None = None,
286
+ use_non_positive_negative_number_constrained_types: bool = False, # noqa: FBT001, FBT002
287
+ use_union_operator: bool = False, # noqa: FBT001, FBT002
288
+ use_pendulum: bool = False, # noqa: FBT001, FBT002
311
289
  target_datetime_class: DatetimeClassType = DatetimeClassType.Datetime,
312
- ):
290
+ ) -> None:
313
291
  super().__init__(
314
292
  python_version,
315
293
  use_standard_collections,
@@ -332,7 +310,7 @@ class DataTypeManager(_DataTypeManager):
332
310
  else {}
333
311
  )
334
312
 
335
- self.type_map: Dict[Types, DataType] = {
313
+ self.type_map: dict[Types, DataType] = {
336
314
  **type_map_factory(self.data_type),
337
315
  **datetime_map,
338
316
  }
@@ -1,6 +1,6 @@
1
1
  from __future__ import annotations
2
2
 
3
- from typing import Iterable, Optional
3
+ from typing import TYPE_CHECKING, Optional
4
4
 
5
5
  from pydantic import BaseModel as _BaseModel
6
6
 
@@ -9,39 +9,29 @@ from .custom_root_type import CustomRootType
9
9
  from .dataclass import DataClass
10
10
  from .types import DataTypeManager
11
11
 
12
+ if TYPE_CHECKING:
13
+ from collections.abc import Iterable
14
+
12
15
 
13
16
  def dump_resolve_reference_action(class_names: Iterable[str]) -> str:
14
- return '\n'.join(
15
- f'{class_name}.update_forward_refs()' for class_name in class_names
16
- )
17
+ return "\n".join(f"{class_name}.update_forward_refs()" for class_name in class_names)
17
18
 
18
19
 
19
20
  class Config(_BaseModel):
20
- extra: Optional[str] = None
21
- title: Optional[str] = None
22
- allow_population_by_field_name: Optional[bool] = None
23
- allow_extra_fields: Optional[bool] = None
24
- allow_mutation: Optional[bool] = None
25
- arbitrary_types_allowed: Optional[bool] = None
26
- orm_mode: Optional[bool] = None
27
-
28
-
29
- # def get_validator_template() -> Template:
30
- # template_file_path: Path = Path('pydantic') / 'one_of_validator.jinja2'
31
- # loader = FileSystemLoader(str(TEMPLATE_DIR / template_file_path.parent))
32
- # environment: Environment = Environment(loader=loader, autoescape=True)
33
- # return environment.get_template(template_file_path.name)
34
- #
35
- #
36
- # VALIDATOR_TEMPLATE: Template = get_validator_template()
21
+ extra: Optional[str] = None # noqa: UP045
22
+ title: Optional[str] = None # noqa: UP045
23
+ allow_population_by_field_name: Optional[bool] = None # noqa: UP045
24
+ allow_extra_fields: Optional[bool] = None # noqa: UP045
25
+ allow_mutation: Optional[bool] = None # noqa: UP045
26
+ arbitrary_types_allowed: Optional[bool] = None # noqa: UP045
27
+ orm_mode: Optional[bool] = None # noqa: UP045
37
28
 
38
29
 
39
30
  __all__ = [
40
- 'BaseModel',
41
- 'DataModelField',
42
- 'CustomRootType',
43
- 'DataClass',
44
- 'dump_resolve_reference_action',
45
- 'DataTypeManager',
46
- # 'VALIDATOR_TEMPLATE',
31
+ "BaseModel",
32
+ "CustomRootType",
33
+ "DataClass",
34
+ "DataModelField",
35
+ "DataTypeManager",
36
+ "dump_resolve_reference_action",
47
37
  ]