datamodel-code-generator 0.27.1-py3-none-any.whl → 0.27.3-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
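For readers who want to reproduce a comparison like this locally, the following is a minimal sketch that diffs the Python sources of two downloaded wheels using only the Python standard library (zipfile and difflib). It is an illustration, not the tooling behind this page; the wheel filenames are assumptions, and the wheels must first be fetched, for example with pip download --no-deps datamodel-code-generator==0.27.1 and the same command for 0.27.3.

import difflib
import zipfile


def wheel_sources(path: str) -> dict[str, list[str]]:
    # Collect the text of every .py member in the wheel, keyed by archive path.
    with zipfile.ZipFile(path) as wheel:
        return {
            name: wheel.read(name).decode("utf-8", errors="replace").splitlines(keepends=True)
            for name in wheel.namelist()
            if name.endswith(".py")
        }


# Assumed local filenames; adjust to wherever the wheels were downloaded.
old = wheel_sources("datamodel_code_generator-0.27.1-py3-none-any.whl")
new = wheel_sources("datamodel_code_generator-0.27.3-py3-none-any.whl")

for name in sorted(old.keys() | new.keys()):
    diff = difflib.unified_diff(old.get(name, []), new.get(name, []), fromfile=name, tofile=name)
    print("".join(diff), end="")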

Potentially problematic release. This version of datamodel-code-generator might be problematic.
Files changed (43)
  1. datamodel_code_generator/__init__.py +159 -190
  2. datamodel_code_generator/__main__.py +151 -173
  3. datamodel_code_generator/arguments.py +227 -230
  4. datamodel_code_generator/format.py +77 -99
  5. datamodel_code_generator/http.py +9 -10
  6. datamodel_code_generator/imports.py +57 -64
  7. datamodel_code_generator/model/__init__.py +26 -31
  8. datamodel_code_generator/model/base.py +94 -127
  9. datamodel_code_generator/model/dataclass.py +58 -59
  10. datamodel_code_generator/model/enum.py +34 -30
  11. datamodel_code_generator/model/imports.py +13 -11
  12. datamodel_code_generator/model/msgspec.py +112 -126
  13. datamodel_code_generator/model/pydantic/__init__.py +14 -27
  14. datamodel_code_generator/model/pydantic/base_model.py +120 -139
  15. datamodel_code_generator/model/pydantic/custom_root_type.py +2 -2
  16. datamodel_code_generator/model/pydantic/dataclass.py +6 -4
  17. datamodel_code_generator/model/pydantic/imports.py +35 -33
  18. datamodel_code_generator/model/pydantic/types.py +86 -117
  19. datamodel_code_generator/model/pydantic_v2/__init__.py +17 -17
  20. datamodel_code_generator/model/pydantic_v2/base_model.py +118 -119
  21. datamodel_code_generator/model/pydantic_v2/imports.py +5 -3
  22. datamodel_code_generator/model/pydantic_v2/root_model.py +6 -6
  23. datamodel_code_generator/model/pydantic_v2/types.py +8 -7
  24. datamodel_code_generator/model/rootmodel.py +1 -1
  25. datamodel_code_generator/model/scalar.py +33 -32
  26. datamodel_code_generator/model/typed_dict.py +42 -41
  27. datamodel_code_generator/model/types.py +19 -17
  28. datamodel_code_generator/model/union.py +21 -17
  29. datamodel_code_generator/parser/__init__.py +12 -11
  30. datamodel_code_generator/parser/base.py +320 -492
  31. datamodel_code_generator/parser/graphql.py +80 -111
  32. datamodel_code_generator/parser/jsonschema.py +422 -580
  33. datamodel_code_generator/parser/openapi.py +175 -204
  34. datamodel_code_generator/pydantic_patch.py +8 -9
  35. datamodel_code_generator/reference.py +192 -274
  36. datamodel_code_generator/types.py +147 -182
  37. datamodel_code_generator/util.py +22 -26
  38. {datamodel_code_generator-0.27.1.dist-info → datamodel_code_generator-0.27.3.dist-info}/METADATA +12 -11
  39. datamodel_code_generator-0.27.3.dist-info/RECORD +59 -0
  40. datamodel_code_generator-0.27.1.dist-info/RECORD +0 -59
  41. {datamodel_code_generator-0.27.1.dist-info → datamodel_code_generator-0.27.3.dist-info}/WHEEL +0 -0
  42. {datamodel_code_generator-0.27.1.dist-info → datamodel_code_generator-0.27.3.dist-info}/entry_points.txt +0 -0
  43. {datamodel_code_generator-0.27.1.dist-info → datamodel_code_generator-0.27.3.dist-info}/licenses/LICENSE +0 -0

datamodel_code_generator/format.py

@@ -3,7 +3,7 @@ from __future__ import annotations
 from enum import Enum
 from importlib import import_module
 from pathlib import Path
-from typing import TYPE_CHECKING, Any, Dict, List, Optional, Sequence
+from typing import TYPE_CHECKING, Any, Sequence
 from warnings import warn
 
 import black
@@ -18,28 +18,28 @@ except ImportError: # pragma: no cover
 
 
 class DatetimeClassType(Enum):
-    Datetime = 'datetime'
-    Awaredatetime = 'AwareDatetime'
-    Naivedatetime = 'NaiveDatetime'
+    Datetime = "datetime"
+    Awaredatetime = "AwareDatetime"
+    Naivedatetime = "NaiveDatetime"
 
 
 class PythonVersion(Enum):
-    PY_36 = '3.6'
-    PY_37 = '3.7'
-    PY_38 = '3.8'
-    PY_39 = '3.9'
-    PY_310 = '3.10'
-    PY_311 = '3.11'
-    PY_312 = '3.12'
-    PY_313 = '3.13'
+    PY_36 = "3.6"
+    PY_37 = "3.7"
+    PY_38 = "3.8"
+    PY_39 = "3.9"
+    PY_310 = "3.10"
+    PY_311 = "3.11"
+    PY_312 = "3.12"
+    PY_313 = "3.13"
 
     @cached_property
     def _is_py_38_or_later(self) -> bool:  # pragma: no cover
-        return self.value not in {self.PY_36.value, self.PY_37.value}  # type: ignore
+        return self.value not in {self.PY_36.value, self.PY_37.value}
 
     @cached_property
     def _is_py_39_or_later(self) -> bool:  # pragma: no cover
-        return self.value not in {self.PY_36.value, self.PY_37.value, self.PY_38.value}  # type: ignore
+        return self.value not in {self.PY_36.value, self.PY_37.value, self.PY_38.value}
 
     @cached_property
     def _is_py_310_or_later(self) -> bool:  # pragma: no cover
@@ -48,7 +48,7 @@ class PythonVersion(Enum):
             self.PY_37.value,
             self.PY_38.value,
             self.PY_39.value,
-        }  # type: ignore
+        }
 
     @cached_property
     def _is_py_311_or_later(self) -> bool:  # pragma: no cover
@@ -58,7 +58,7 @@ class PythonVersion(Enum):
             self.PY_38.value,
             self.PY_39.value,
             self.PY_310.value,
-        }  # type: ignore
+        }
 
     @property
     def has_literal_type(self) -> bool:
@@ -89,12 +89,12 @@ if TYPE_CHECKING:
 
     class _TargetVersion(Enum): ...
 
-    BLACK_PYTHON_VERSION: Dict[PythonVersion, _TargetVersion]
+    BLACK_PYTHON_VERSION: dict[PythonVersion, _TargetVersion]
 else:
-    BLACK_PYTHON_VERSION: Dict[PythonVersion, black.TargetVersion] = {
-        v: getattr(black.TargetVersion, f'PY{v.name.split("_")[-1]}')
+    BLACK_PYTHON_VERSION: dict[PythonVersion, black.TargetVersion] = {
+        v: getattr(black.TargetVersion, f"PY{v.name.split('_')[-1]}")
         for v in PythonVersion
-        if hasattr(black.TargetVersion, f'PY{v.name.split("_")[-1]}')
+        if hasattr(black.TargetVersion, f"PY{v.name.split('_')[-1]}")
     }
 
 
@@ -104,132 +104,111 @@ def is_supported_in_black(python_version: PythonVersion) -> bool: # pragma: no
 
 def black_find_project_root(sources: Sequence[Path]) -> Path:
     if TYPE_CHECKING:
-        from typing import Iterable, Tuple, Union
+        from typing import Iterable  # noqa: PLC0415
 
         def _find_project_root(
-            srcs: Union[Sequence[str], Iterable[str]],
-        ) -> Union[Tuple[Path, str], Path]: ...
+            srcs: Sequence[str] | Iterable[str],
+        ) -> tuple[Path, str] | Path: ...
 
     else:
-        from black import find_project_root as _find_project_root
+        from black import find_project_root as _find_project_root  # noqa: PLC0415
     project_root = _find_project_root(tuple(str(s) for s in sources))
     if isinstance(project_root, tuple):
         return project_root[0]
-    else:  # pragma: no cover
-        return project_root
+    # pragma: no cover
+    return project_root
 
 
 class CodeFormatter:
-    def __init__(
+    def __init__(  # noqa: PLR0912, PLR0913, PLR0917
         self,
         python_version: PythonVersion,
-        settings_path: Optional[Path] = None,
-        wrap_string_literal: Optional[bool] = None,
-        skip_string_normalization: bool = True,
-        known_third_party: Optional[List[str]] = None,
-        custom_formatters: Optional[List[str]] = None,
-        custom_formatters_kwargs: Optional[Dict[str, Any]] = None,
+        settings_path: Path | None = None,
+        wrap_string_literal: bool | None = None,  # noqa: FBT001
+        skip_string_normalization: bool = True,  # noqa: FBT001, FBT002
+        known_third_party: list[str] | None = None,
+        custom_formatters: list[str] | None = None,
+        custom_formatters_kwargs: dict[str, Any] | None = None,
     ) -> None:
         if not settings_path:
-            settings_path = Path().resolve()
+            settings_path = Path.cwd()
 
         root = black_find_project_root((settings_path,))
-        path = root / 'pyproject.toml'
+        path = root / "pyproject.toml"
         if path.is_file():
             pyproject_toml = load_toml(path)
-            config = pyproject_toml.get('tool', {}).get('black', {})
+            config = pyproject_toml.get("tool", {}).get("black", {})
         else:
             config = {}
 
-        black_kwargs: Dict[str, Any] = {}
+        black_kwargs: dict[str, Any] = {}
         if wrap_string_literal is not None:
             experimental_string_processing = wrap_string_literal
+        elif black.__version__ < "24.1.0":
+            experimental_string_processing = config.get("experimental-string-processing")
         else:
-            if black.__version__ < '24.1.0':  # type: ignore
-                experimental_string_processing = config.get(
-                    'experimental-string-processing'
-                )
-            else:
-                experimental_string_processing = config.get(
-                    'preview', False
-                ) and (  # pragma: no cover
-                    config.get('unstable', False)
-                    or 'string_processing' in config.get('enable-unstable-feature', [])
-                )
+            experimental_string_processing = config.get("preview", False) and (  # pragma: no cover
+                config.get("unstable", False) or "string_processing" in config.get("enable-unstable-feature", [])
+            )
 
         if experimental_string_processing is not None:  # pragma: no cover
-            if black.__version__.startswith('19.'):  # type: ignore
+            if black.__version__.startswith("19."):
                 warn(
-                    f"black doesn't support `experimental-string-processing` option"  # type: ignore
-                    f' for wrapping string literal in {black.__version__}'
-                )
-            elif black.__version__ < '24.1.0':  # type: ignore
-                black_kwargs['experimental_string_processing'] = (
-                    experimental_string_processing
+                    f"black doesn't support `experimental-string-processing` option"
+                    f" for wrapping string literal in {black.__version__}",
+                    stacklevel=2,
                 )
+            elif black.__version__ < "24.1.0":
+                black_kwargs["experimental_string_processing"] = experimental_string_processing
             elif experimental_string_processing:
-                black_kwargs['preview'] = True
-                black_kwargs['unstable'] = config.get('unstable', False)
-                black_kwargs['enabled_features'] = {
-                    black.mode.Preview.string_processing
-                }
+                black_kwargs["preview"] = True
+                black_kwargs["unstable"] = config.get("unstable", False)
+                black_kwargs["enabled_features"] = {black.mode.Preview.string_processing}
 
         if TYPE_CHECKING:
             self.black_mode: black.FileMode
         else:
             self.black_mode = black.FileMode(
                 target_versions={BLACK_PYTHON_VERSION[python_version]},
-                line_length=config.get('line-length', black.DEFAULT_LINE_LENGTH),
-                string_normalization=not skip_string_normalization
-                or not config.get('skip-string-normalization', True),
+                line_length=config.get("line-length", black.DEFAULT_LINE_LENGTH),
+                string_normalization=not skip_string_normalization or not config.get("skip-string-normalization", True),
                 **black_kwargs,
             )
 
         self.settings_path: str = str(settings_path)
 
-        self.isort_config_kwargs: Dict[str, Any] = {}
+        self.isort_config_kwargs: dict[str, Any] = {}
         if known_third_party:
-            self.isort_config_kwargs['known_third_party'] = known_third_party
+            self.isort_config_kwargs["known_third_party"] = known_third_party
 
-        if isort.__version__.startswith('4.'):
+        if isort.__version__.startswith("4."):
             self.isort_config = None
         else:
-            self.isort_config = isort.Config(
-                settings_path=self.settings_path, **self.isort_config_kwargs
-            )
+            self.isort_config = isort.Config(settings_path=self.settings_path, **self.isort_config_kwargs)
 
         self.custom_formatters_kwargs = custom_formatters_kwargs or {}
         self.custom_formatters = self._check_custom_formatters(custom_formatters)
 
-    def _load_custom_formatter(
-        self, custom_formatter_import: str
-    ) -> CustomCodeFormatter:
+    def _load_custom_formatter(self, custom_formatter_import: str) -> CustomCodeFormatter:
         import_ = import_module(custom_formatter_import)
 
-        if not hasattr(import_, 'CodeFormatter'):
-            raise NameError(
-                f'Custom formatter module `{import_.__name__}` must contains object with name Formatter'
-            )
+        if not hasattr(import_, "CodeFormatter"):
+            msg = f"Custom formatter module `{import_.__name__}` must contains object with name Formatter"
+            raise NameError(msg)
 
-        formatter_class = import_.__getattribute__('CodeFormatter')
+        formatter_class = import_.__getattribute__("CodeFormatter")  # noqa: PLC2801
 
         if not issubclass(formatter_class, CustomCodeFormatter):
-            raise TypeError(
-                f'The custom module {custom_formatter_import} must inherit from `datamodel-code-generator`'
-            )
+            msg = f"The custom module {custom_formatter_import} must inherit from `datamodel-code-generator`"
+            raise TypeError(msg)
 
         return formatter_class(formatter_kwargs=self.custom_formatters_kwargs)
 
-    def _check_custom_formatters(
-        self, custom_formatters: Optional[List[str]]
-    ) -> List[CustomCodeFormatter]:
+    def _check_custom_formatters(self, custom_formatters: list[str] | None) -> list[CustomCodeFormatter]:
         if custom_formatters is None:
             return []
 
-        return [
-            self._load_custom_formatter(custom_formatter_import)
-            for custom_formatter_import in custom_formatters
-        ]
+        return [self._load_custom_formatter(custom_formatter_import) for custom_formatter_import in custom_formatters]
 
     def format_code(
         self,
@@ -253,24 +232,23 @@ class CodeFormatter:
 
         def apply_isort(self, code: str) -> str: ...
 
-    else:
-        if isort.__version__.startswith('4.'):
+    elif isort.__version__.startswith("4."):
 
-            def apply_isort(self, code: str) -> str:
-                return isort.SortImports(
-                    file_contents=code,
-                    settings_path=self.settings_path,
-                    **self.isort_config_kwargs,
-                ).output
+        def apply_isort(self, code: str) -> str:
+            return isort.SortImports(
+                file_contents=code,
+                settings_path=self.settings_path,
+                **self.isort_config_kwargs,
+            ).output
 
-        else:
+    else:
 
-            def apply_isort(self, code: str) -> str:
-                return isort.code(code, config=self.isort_config)
+        def apply_isort(self, code: str) -> str:
+            return isort.code(code, config=self.isort_config)
 
 
 class CustomCodeFormatter:
-    def __init__(self, formatter_kwargs: Dict[str, Any]) -> None:
+    def __init__(self, formatter_kwargs: dict[str, Any]) -> None:
         self.formatter_kwargs = formatter_kwargs
 
     def apply(self, code: str) -> str:

datamodel_code_generator/http.py

@@ -1,30 +1,29 @@
 from __future__ import annotations
 
-from typing import Optional, Sequence, Tuple
+from typing import Sequence
 
 try:
     import httpx
-except ImportError:  # pragma: no cover
-    raise Exception(
-        "Please run `$pip install 'datamodel-code-generator[http]`' to resolve URL Reference"
-    )
+except ImportError as exc:  # pragma: no cover
+    msg = "Please run `$pip install 'datamodel-code-generator[http]`' to resolve URL Reference"
+    raise Exception(msg) from exc  # noqa: TRY002
 
 
 def get_body(
     url: str,
-    headers: Optional[Sequence[Tuple[str, str]]] = None,
-    ignore_tls: bool = False,
-    query_parameters: Optional[Sequence[Tuple[str, str]]] = None,
+    headers: Sequence[tuple[str, str]] | None = None,
+    ignore_tls: bool = False,  # noqa: FBT001, FBT002
+    query_parameters: Sequence[tuple[str, str]] | None = None,
 ) -> str:
     return httpx.get(
         url,
         headers=headers,
         verify=not ignore_tls,
         follow_redirects=True,
-        params=query_parameters,  # pyright: ignore [reportArgumentType]
+        params=query_parameters,  # pyright: ignore[reportArgumentType]
         # TODO: Improve params type
     ).text
 
 
-def join_url(url: str, ref: str = '.') -> str:
+def join_url(url: str, ref: str = ".") -> str:
     return str(httpx.URL(url).join(ref))

datamodel_code_generator/imports.py

@@ -2,86 +2,79 @@ from __future__ import annotations
 
 from collections import defaultdict
 from functools import lru_cache
-from typing import DefaultDict, Dict, Iterable, List, Optional, Set, Tuple, Union
+from itertools import starmap
+from typing import DefaultDict, Iterable, Optional, Set
 
 from datamodel_code_generator.util import BaseModel
 
 
 class Import(BaseModel):
-    from_: Optional[str] = None
+    from_: Optional[str] = None  # noqa: UP045
     import_: str
-    alias: Optional[str] = None
-    reference_path: Optional[str] = None
+    alias: Optional[str] = None  # noqa: UP045
+    reference_path: Optional[str] = None  # noqa: UP045
 
     @classmethod
     @lru_cache
     def from_full_path(cls, class_path: str) -> Import:
-        split_class_path: List[str] = class_path.split('.')
-        return Import(
-            from_='.'.join(split_class_path[:-1]) or None, import_=split_class_path[-1]
-        )
+        split_class_path: list[str] = class_path.split(".")
+        return Import(from_=".".join(split_class_path[:-1]) or None, import_=split_class_path[-1])
 
 
 class Imports(DefaultDict[Optional[str], Set[str]]):
     def __str__(self) -> str:
         return self.dump()
 
-    def __init__(self, use_exact: bool = False) -> None:
+    def __init__(self, use_exact: bool = False) -> None:  # noqa: FBT001, FBT002
         super().__init__(set)
-        self.alias: DefaultDict[Optional[str], Dict[str, str]] = defaultdict(dict)
-        self.counter: Dict[Tuple[Optional[str], str], int] = defaultdict(int)
-        self.reference_paths: Dict[str, Import] = {}
+        self.alias: defaultdict[str | None, dict[str, str]] = defaultdict(dict)
+        self.counter: dict[tuple[str | None, str], int] = defaultdict(int)
+        self.reference_paths: dict[str, Import] = {}
        self.use_exact: bool = use_exact
 
-    def _set_alias(self, from_: Optional[str], imports: Set[str]) -> List[str]:
+    def _set_alias(self, from_: str | None, imports: set[str]) -> list[str]:
         return [
-            f'{i} as {self.alias[from_][i]}'
-            if i in self.alias[from_] and i != self.alias[from_][i]
-            else i
+            f"{i} as {self.alias[from_][i]}" if i in self.alias[from_] and i != self.alias[from_][i] else i
             for i in sorted(imports)
         ]
 
-    def create_line(self, from_: Optional[str], imports: Set[str]) -> str:
+    def create_line(self, from_: str | None, imports: set[str]) -> str:
         if from_:
-            return f'from {from_} import {", ".join(self._set_alias(from_, imports))}'
-        return '\n'.join(f'import {i}' for i in self._set_alias(from_, imports))
+            return f"from {from_} import {', '.join(self._set_alias(from_, imports))}"
+        return "\n".join(f"import {i}" for i in self._set_alias(from_, imports))
 
     def dump(self) -> str:
-        return '\n'.join(
-            self.create_line(from_, imports) for from_, imports in self.items()
-        )
+        return "\n".join(starmap(self.create_line, self.items()))
 
-    def append(self, imports: Union[Import, Iterable[Import], None]) -> None:
+    def append(self, imports: Import | Iterable[Import] | None) -> None:
         if imports:
             if isinstance(imports, Import):
                 imports = [imports]
             for import_ in imports:
                 if import_.reference_path:
                     self.reference_paths[import_.reference_path] = import_
-                if '.' in import_.import_:
+                if "." in import_.import_:
                     self[None].add(import_.import_)
-                    self.counter[(None, import_.import_)] += 1
+                    self.counter[None, import_.import_] += 1
                 else:
                     self[import_.from_].add(import_.import_)
-                    self.counter[(import_.from_, import_.import_)] += 1
+                    self.counter[import_.from_, import_.import_] += 1
                 if import_.alias:
                     self.alias[import_.from_][import_.import_] = import_.alias
 
-    def remove(self, imports: Union[Import, Iterable[Import]]) -> None:
+    def remove(self, imports: Import | Iterable[Import]) -> None:
         if isinstance(imports, Import):  # pragma: no cover
             imports = [imports]
         for import_ in imports:
-            if '.' in import_.import_:  # pragma: no cover
-                self.counter[(None, import_.import_)] -= 1
-                if self.counter[(None, import_.import_)] == 0:  # pragma: no cover
+            if "." in import_.import_:  # pragma: no cover
+                self.counter[None, import_.import_] -= 1
+                if self.counter[None, import_.import_] == 0:  # pragma: no cover
                     self[None].remove(import_.import_)
                     if not self[None]:
                         del self[None]
             else:
-                self.counter[(import_.from_, import_.import_)] -= 1  # pragma: no cover
-                if (
-                    self.counter[(import_.from_, import_.import_)] == 0
-                ):  # pragma: no cover
+                self.counter[import_.from_, import_.import_] -= 1  # pragma: no cover
+                if self.counter[import_.from_, import_.import_] == 0:  # pragma: no cover
                     self[import_.from_].remove(import_.import_)
                     if not self[import_.from_]:
                         del self[import_.from_]
@@ -95,33 +88,33 @@ class Imports(DefaultDict[Optional[str], Set[str]]):
             self.remove(self.reference_paths[reference_path])
 
 
-IMPORT_ANNOTATED = Import.from_full_path('typing.Annotated')
-IMPORT_ANNOTATED_BACKPORT = Import.from_full_path('typing_extensions.Annotated')
-IMPORT_ANY = Import.from_full_path('typing.Any')
-IMPORT_LIST = Import.from_full_path('typing.List')
-IMPORT_SET = Import.from_full_path('typing.Set')
-IMPORT_UNION = Import.from_full_path('typing.Union')
-IMPORT_OPTIONAL = Import.from_full_path('typing.Optional')
-IMPORT_LITERAL = Import.from_full_path('typing.Literal')
-IMPORT_TYPE_ALIAS = Import.from_full_path('typing.TypeAlias')
-IMPORT_LITERAL_BACKPORT = Import.from_full_path('typing_extensions.Literal')
-IMPORT_SEQUENCE = Import.from_full_path('typing.Sequence')
-IMPORT_FROZEN_SET = Import.from_full_path('typing.FrozenSet')
-IMPORT_MAPPING = Import.from_full_path('typing.Mapping')
-IMPORT_ABC_SEQUENCE = Import.from_full_path('collections.abc.Sequence')
-IMPORT_ABC_SET = Import.from_full_path('collections.abc.Set')
-IMPORT_ABC_MAPPING = Import.from_full_path('collections.abc.Mapping')
-IMPORT_ENUM = Import.from_full_path('enum.Enum')
-IMPORT_ANNOTATIONS = Import.from_full_path('__future__.annotations')
-IMPORT_DICT = Import.from_full_path('typing.Dict')
-IMPORT_DECIMAL = Import.from_full_path('decimal.Decimal')
-IMPORT_DATE = Import.from_full_path('datetime.date')
-IMPORT_DATETIME = Import.from_full_path('datetime.datetime')
-IMPORT_TIMEDELTA = Import.from_full_path('datetime.timedelta')
-IMPORT_PATH = Import.from_full_path('pathlib.Path')
-IMPORT_TIME = Import.from_full_path('datetime.time')
-IMPORT_UUID = Import.from_full_path('uuid.UUID')
-IMPORT_PENDULUM_DATE = Import.from_full_path('pendulum.Date')
-IMPORT_PENDULUM_DATETIME = Import.from_full_path('pendulum.DateTime')
-IMPORT_PENDULUM_DURATION = Import.from_full_path('pendulum.Duration')
-IMPORT_PENDULUM_TIME = Import.from_full_path('pendulum.Time')
+IMPORT_ANNOTATED = Import.from_full_path("typing.Annotated")
+IMPORT_ANNOTATED_BACKPORT = Import.from_full_path("typing_extensions.Annotated")
+IMPORT_ANY = Import.from_full_path("typing.Any")
+IMPORT_LIST = Import.from_full_path("typing.List")
+IMPORT_SET = Import.from_full_path("typing.Set")
+IMPORT_UNION = Import.from_full_path("typing.Union")
+IMPORT_OPTIONAL = Import.from_full_path("typing.Optional")
+IMPORT_LITERAL = Import.from_full_path("typing.Literal")
+IMPORT_TYPE_ALIAS = Import.from_full_path("typing.TypeAlias")
+IMPORT_LITERAL_BACKPORT = Import.from_full_path("typing_extensions.Literal")
+IMPORT_SEQUENCE = Import.from_full_path("typing.Sequence")
+IMPORT_FROZEN_SET = Import.from_full_path("typing.FrozenSet")
+IMPORT_MAPPING = Import.from_full_path("typing.Mapping")
+IMPORT_ABC_SEQUENCE = Import.from_full_path("collections.abc.Sequence")
+IMPORT_ABC_SET = Import.from_full_path("collections.abc.Set")
+IMPORT_ABC_MAPPING = Import.from_full_path("collections.abc.Mapping")
+IMPORT_ENUM = Import.from_full_path("enum.Enum")
+IMPORT_ANNOTATIONS = Import.from_full_path("__future__.annotations")
+IMPORT_DICT = Import.from_full_path("typing.Dict")
+IMPORT_DECIMAL = Import.from_full_path("decimal.Decimal")
+IMPORT_DATE = Import.from_full_path("datetime.date")
+IMPORT_DATETIME = Import.from_full_path("datetime.datetime")
+IMPORT_TIMEDELTA = Import.from_full_path("datetime.timedelta")
+IMPORT_PATH = Import.from_full_path("pathlib.Path")
+IMPORT_TIME = Import.from_full_path("datetime.time")
+IMPORT_UUID = Import.from_full_path("uuid.UUID")
+IMPORT_PENDULUM_DATE = Import.from_full_path("pendulum.Date")
+IMPORT_PENDULUM_DATETIME = Import.from_full_path("pendulum.DateTime")
+IMPORT_PENDULUM_DURATION = Import.from_full_path("pendulum.Duration")
+IMPORT_PENDULUM_TIME = Import.from_full_path("pendulum.Time")

datamodel_code_generator/model/__init__.py

@@ -1,38 +1,38 @@
 from __future__ import annotations
 
 import sys
-from typing import TYPE_CHECKING, Callable, Iterable, List, NamedTuple, Optional, Type
+from typing import TYPE_CHECKING, Callable, Iterable, NamedTuple
+
+from datamodel_code_generator import DatetimeClassType, PythonVersion
 
-from .. import DatetimeClassType, PythonVersion
-from ..types import DataTypeManager as DataTypeManagerABC
 from .base import ConstraintsBase, DataModel, DataModelFieldBase
 
 if TYPE_CHECKING:
-    from .. import DataModelType
+    from datamodel_code_generator import DataModelType
+    from datamodel_code_generator.types import DataTypeManager as DataTypeManagerABC
 
 DEFAULT_TARGET_DATETIME_CLASS = DatetimeClassType.Datetime
-DEFAULT_TARGET_PYTHON_VERSION = PythonVersion(
-    f'{sys.version_info.major}.{sys.version_info.minor}'
-)
+DEFAULT_TARGET_PYTHON_VERSION = PythonVersion(f"{sys.version_info.major}.{sys.version_info.minor}")
 
 
 class DataModelSet(NamedTuple):
-    data_model: Type[DataModel]
-    root_model: Type[DataModel]
-    field_model: Type[DataModelFieldBase]
-    data_type_manager: Type[DataTypeManagerABC]
-    dump_resolve_reference_action: Optional[Callable[[Iterable[str]], str]]
-    known_third_party: Optional[List[str]] = None
+    data_model: type[DataModel]
+    root_model: type[DataModel]
+    field_model: type[DataModelFieldBase]
+    data_type_manager: type[DataTypeManagerABC]
+    dump_resolve_reference_action: Callable[[Iterable[str]], str] | None
+    known_third_party: list[str] | None = None
 
 
 def get_data_model_types(
     data_model_type: DataModelType,
     target_python_version: PythonVersion = DEFAULT_TARGET_PYTHON_VERSION,
-    target_datetime_class: Optional[DatetimeClassType] = None,
+    target_datetime_class: DatetimeClassType | None = None,
 ) -> DataModelSet:
-    from .. import DataModelType
-    from . import dataclass, msgspec, pydantic, pydantic_v2, rootmodel, typed_dict
-    from .types import DataTypeManager
+    from datamodel_code_generator import DataModelType  # noqa: PLC0415
+
+    from . import dataclass, msgspec, pydantic, pydantic_v2, rootmodel, typed_dict  # noqa: PLC0415
+    from .types import DataTypeManager  # noqa: PLC0415
 
     if target_datetime_class is None:
         target_datetime_class = DEFAULT_TARGET_DATETIME_CLASS
@@ -44,7 +44,7 @@ def get_data_model_types(
             data_type_manager=pydantic.DataTypeManager,
             dump_resolve_reference_action=pydantic.dump_resolve_reference_action,
         )
-    elif data_model_type == DataModelType.PydanticV2BaseModel:
+    if data_model_type == DataModelType.PydanticV2BaseModel:
         return DataModelSet(
             data_model=pydantic_v2.BaseModel,
             root_model=pydantic_v2.RootModel,
@@ -52,7 +52,7 @@ def get_data_model_types(
             data_type_manager=pydantic_v2.DataTypeManager,
             dump_resolve_reference_action=pydantic_v2.dump_resolve_reference_action,
         )
-    elif data_model_type == DataModelType.DataclassesDataclass:
+    if data_model_type == DataModelType.DataclassesDataclass:
         return DataModelSet(
             data_model=dataclass.DataClass,
             root_model=rootmodel.RootModel,
@@ -60,13 +60,9 @@ def get_data_model_types(
             data_type_manager=dataclass.DataTypeManager,
             dump_resolve_reference_action=None,
         )
-    elif data_model_type == DataModelType.TypingTypedDict:
+    if data_model_type == DataModelType.TypingTypedDict:
         return DataModelSet(
-            data_model=(
-                typed_dict.TypedDict
-                if target_python_version.has_typed_dict
-                else typed_dict.TypedDictBackport
-            ),
+            data_model=(typed_dict.TypedDict if target_python_version.has_typed_dict else typed_dict.TypedDictBackport),
             root_model=rootmodel.RootModel,
             field_model=(
                 typed_dict.DataModelField
@@ -76,18 +72,17 @@ def get_data_model_types(
             data_type_manager=DataTypeManager,
             dump_resolve_reference_action=None,
         )
-    elif data_model_type == DataModelType.MsgspecStruct:
+    if data_model_type == DataModelType.MsgspecStruct:
         return DataModelSet(
             data_model=msgspec.Struct,
             root_model=msgspec.RootModel,
             field_model=msgspec.DataModelField,
             data_type_manager=msgspec.DataTypeManager,
             dump_resolve_reference_action=None,
-            known_third_party=['msgspec'],
+            known_third_party=["msgspec"],
        )
-    raise ValueError(
-        f'{data_model_type} is unsupported data model type'
-    )  # pragma: no cover
+    msg = f"{data_model_type} is unsupported data model type"
+    raise ValueError(msg)  # pragma: no cover
 
 
-__all__ = ['ConstraintsBase', 'DataModel', 'DataModelFieldBase']
+__all__ = ["ConstraintsBase", "DataModel", "DataModelFieldBase"]