datamodel-code-generator 0.27.3__py3-none-any.whl → 0.28.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of datamodel-code-generator might be problematic. See the registry's advisory page for more details.

Files changed (36)
  1. datamodel_code_generator/__init__.py +10 -7
  2. datamodel_code_generator/__main__.py +18 -39
  3. datamodel_code_generator/arguments.py +2 -2
  4. datamodel_code_generator/format.py +9 -39
  5. datamodel_code_generator/http.py +4 -1
  6. datamodel_code_generator/imports.py +5 -4
  7. datamodel_code_generator/model/__init__.py +4 -2
  8. datamodel_code_generator/model/base.py +12 -23
  9. datamodel_code_generator/model/dataclass.py +7 -14
  10. datamodel_code_generator/model/enum.py +2 -2
  11. datamodel_code_generator/model/msgspec.py +9 -17
  12. datamodel_code_generator/model/pydantic/__init__.py +4 -1
  13. datamodel_code_generator/model/pydantic/base_model.py +4 -4
  14. datamodel_code_generator/model/pydantic/dataclass.py +2 -2
  15. datamodel_code_generator/model/pydantic/types.py +6 -3
  16. datamodel_code_generator/model/pydantic_v2/__init__.py +5 -2
  17. datamodel_code_generator/model/pydantic_v2/base_model.py +4 -12
  18. datamodel_code_generator/model/pydantic_v2/types.py +4 -1
  19. datamodel_code_generator/model/scalar.py +2 -2
  20. datamodel_code_generator/model/typed_dict.py +5 -16
  21. datamodel_code_generator/model/types.py +6 -3
  22. datamodel_code_generator/model/union.py +2 -2
  23. datamodel_code_generator/parser/__init__.py +6 -3
  24. datamodel_code_generator/parser/base.py +11 -27
  25. datamodel_code_generator/parser/graphql.py +7 -8
  26. datamodel_code_generator/parser/jsonschema.py +33 -44
  27. datamodel_code_generator/parser/openapi.py +20 -31
  28. datamodel_code_generator/reference.py +11 -27
  29. datamodel_code_generator/types.py +14 -45
  30. datamodel_code_generator/util.py +1 -10
  31. {datamodel_code_generator-0.27.3.dist-info → datamodel_code_generator-0.28.0.dist-info}/METADATA +4 -5
  32. datamodel_code_generator-0.28.0.dist-info/RECORD +59 -0
  33. datamodel_code_generator-0.27.3.dist-info/RECORD +0 -59
  34. {datamodel_code_generator-0.27.3.dist-info → datamodel_code_generator-0.28.0.dist-info}/WHEEL +0 -0
  35. {datamodel_code_generator-0.27.3.dist-info → datamodel_code_generator-0.28.0.dist-info}/entry_points.txt +0 -0
  36. {datamodel_code_generator-0.27.3.dist-info → datamodel_code_generator-0.28.0.dist-info}/licenses/LICENSE +0 -0
@@ -1,8 +1,9 @@
1
1
  from __future__ import annotations
2
2
 
3
3
  from abc import ABC
4
+ from functools import cached_property
4
5
  from pathlib import Path
5
- from typing import TYPE_CHECKING, Any, ClassVar, Optional, Set
6
+ from typing import TYPE_CHECKING, Any, ClassVar, Optional
6
7
 
7
8
  from pydantic import Field
8
9
 
@@ -18,7 +19,6 @@ from datamodel_code_generator.model.pydantic.imports import (
18
19
  IMPORT_FIELD,
19
20
  )
20
21
  from datamodel_code_generator.types import UnionIntFloat, chain_as_tuple
21
- from datamodel_code_generator.util import cached_property
22
22
 
23
23
  if TYPE_CHECKING:
24
24
  from collections import defaultdict
@@ -41,7 +41,7 @@ class Constraints(ConstraintsBase):
41
41
 
42
42
 
43
43
  class DataModelField(DataModelFieldBase):
44
- _EXCLUDE_FIELD_KEYS: ClassVar[Set[str]] = { # noqa: UP006
44
+ _EXCLUDE_FIELD_KEYS: ClassVar[set[str]] = {
45
45
  "alias",
46
46
  "default",
47
47
  "const",
@@ -56,7 +56,7 @@ class DataModelField(DataModelFieldBase):
56
56
  "max_length",
57
57
  "regex",
58
58
  }
59
- _COMPARE_EXPRESSIONS: ClassVar[Set[str]] = {"gt", "ge", "lt", "le"} # noqa: UP006
59
+ _COMPARE_EXPRESSIONS: ClassVar[set[str]] = {"gt", "ge", "lt", "le"}
60
60
  constraints: Optional[Constraints] = None # noqa: UP045
61
61
  _PARSE_METHOD: ClassVar[str] = "parse_obj"
62
62
 
@@ -1,6 +1,6 @@
1
1
  from __future__ import annotations
2
2
 
3
- from typing import TYPE_CHECKING, ClassVar, Tuple
3
+ from typing import TYPE_CHECKING, ClassVar
4
4
 
5
5
  from datamodel_code_generator.model import DataModel
6
6
  from datamodel_code_generator.model.pydantic.imports import IMPORT_DATACLASS
@@ -11,4 +11,4 @@ if TYPE_CHECKING:
11
11
 
12
12
  class DataClass(DataModel):
13
13
  TEMPLATE_FILE_PATH: ClassVar[str] = "pydantic/dataclass.jinja2"
14
- DEFAULT_IMPORTS: ClassVar[Tuple[Import, ...]] = (IMPORT_DATACLASS,) # noqa: UP006
14
+ DEFAULT_IMPORTS: ClassVar[tuple[Import, ...]] = (IMPORT_DATACLASS,)
@@ -1,9 +1,9 @@
1
1
  from __future__ import annotations
2
2
 
3
3
  from decimal import Decimal
4
- from typing import Any, ClassVar, Sequence
4
+ from typing import TYPE_CHECKING, Any, ClassVar
5
5
 
6
- from datamodel_code_generator.format import DatetimeClassType, PythonVersion
6
+ from datamodel_code_generator.format import DatetimeClassType, PythonVersion, PythonVersionMin
7
7
  from datamodel_code_generator.imports import (
8
8
  IMPORT_ANY,
9
9
  IMPORT_DATE,
@@ -53,6 +53,9 @@ from datamodel_code_generator.model.pydantic.imports import (
53
53
  from datamodel_code_generator.types import DataType, StrictTypes, Types, UnionIntFloat
54
54
  from datamodel_code_generator.types import DataTypeManager as _DataTypeManager
55
55
 
56
+ if TYPE_CHECKING:
57
+ from collections.abc import Sequence
58
+
56
59
 
57
60
  def type_map_factory(
58
61
  data_type: type[DataType],
@@ -155,7 +158,7 @@ class DataTypeManager(_DataTypeManager):
155
158
 
156
159
  def __init__( # noqa: PLR0913, PLR0917
157
160
  self,
158
- python_version: PythonVersion = PythonVersion.PY_38,
161
+ python_version: PythonVersion = PythonVersionMin,
159
162
  use_standard_collections: bool = False, # noqa: FBT001, FBT002
160
163
  use_generic_container_types: bool = False, # noqa: FBT001, FBT002
161
164
  strict_types: Sequence[StrictTypes] | None = None,
@@ -1,6 +1,6 @@
1
1
  from __future__ import annotations
2
2
 
3
- from typing import Iterable, Optional, Tuple
3
+ from typing import TYPE_CHECKING, Optional
4
4
 
5
5
  from pydantic import BaseModel as _BaseModel
6
6
 
@@ -8,6 +8,9 @@ from .base_model import BaseModel, DataModelField, UnionMode
8
8
  from .root_model import RootModel
9
9
  from .types import DataTypeManager
10
10
 
11
+ if TYPE_CHECKING:
12
+ from collections.abc import Iterable
13
+
11
14
 
12
15
  def dump_resolve_reference_action(class_names: Iterable[str]) -> str:
13
16
  return "\n".join(f"{class_name}.model_rebuild()" for class_name in class_names)
@@ -21,7 +24,7 @@ class ConfigDict(_BaseModel):
21
24
  from_attributes: Optional[bool] = None # noqa: UP045
22
25
  frozen: Optional[bool] = None # noqa: UP045
23
26
  arbitrary_types_allowed: Optional[bool] = None # noqa: UP045
24
- protected_namespaces: Optional[Tuple[str, ...]] = None # noqa: UP006, UP045
27
+ protected_namespaces: Optional[tuple[str, ...]] = None # noqa: UP045
25
28
  regex_engine: Optional[str] = None # noqa: UP045
26
29
  use_enum_values: Optional[bool] = None # noqa: UP045
27
30
 
@@ -2,15 +2,7 @@ from __future__ import annotations
2
2
 
3
3
  import re
4
4
  from enum import Enum
5
- from typing import (
6
- TYPE_CHECKING,
7
- Any,
8
- ClassVar,
9
- List,
10
- NamedTuple,
11
- Optional,
12
- Set,
13
- )
5
+ from typing import TYPE_CHECKING, Any, ClassVar, NamedTuple, Optional
14
6
 
15
7
  from pydantic import Field
16
8
  from typing_extensions import Literal
@@ -59,7 +51,7 @@ class Constraints(_Constraints):
59
51
 
60
52
 
61
53
  class DataModelField(DataModelFieldV1):
62
- _EXCLUDE_FIELD_KEYS: ClassVar[Set[str]] = { # noqa: UP006
54
+ _EXCLUDE_FIELD_KEYS: ClassVar[set[str]] = {
63
55
  "alias",
64
56
  "default",
65
57
  "gt",
@@ -71,7 +63,7 @@ class DataModelField(DataModelFieldV1):
71
63
  "max_length",
72
64
  "pattern",
73
65
  }
74
- _DEFAULT_FIELD_KEYS: ClassVar[Set[str]] = { # noqa: UP006
66
+ _DEFAULT_FIELD_KEYS: ClassVar[set[str]] = {
75
67
  "default",
76
68
  "default_factory",
77
69
  "alias",
@@ -165,7 +157,7 @@ class ConfigAttribute(NamedTuple):
165
157
  class BaseModel(BaseModelBase):
166
158
  TEMPLATE_FILE_PATH: ClassVar[str] = "pydantic_v2/BaseModel.jinja2"
167
159
  BASE_CLASS: ClassVar[str] = "pydantic.BaseModel"
168
- CONFIG_ATTRIBUTES: ClassVar[List[ConfigAttribute]] = [ # noqa: UP006
160
+ CONFIG_ATTRIBUTES: ClassVar[list[ConfigAttribute]] = [
169
161
  ConfigAttribute("allow_population_by_field_name", "populate_by_name", False), # noqa: FBT003
170
162
  ConfigAttribute("populate_by_name", "populate_by_name", False), # noqa: FBT003
171
163
  ConfigAttribute("allow_mutation", "frozen", True), # noqa: FBT003
@@ -1,6 +1,6 @@
1
1
  from __future__ import annotations
2
2
 
3
- from typing import ClassVar, Sequence
3
+ from typing import TYPE_CHECKING, ClassVar
4
4
 
5
5
  from datamodel_code_generator.format import DatetimeClassType
6
6
  from datamodel_code_generator.model.pydantic import DataTypeManager as _DataTypeManager
@@ -11,6 +11,9 @@ from datamodel_code_generator.model.pydantic_v2.imports import (
11
11
  )
12
12
  from datamodel_code_generator.types import DataType, StrictTypes, Types
13
13
 
14
+ if TYPE_CHECKING:
15
+ from collections.abc import Sequence
16
+
14
17
 
15
18
  class DataTypeManager(_DataTypeManager):
16
19
  PATTERN_KEY: ClassVar[str] = "pattern"
@@ -1,7 +1,7 @@
1
1
  from __future__ import annotations
2
2
 
3
3
  from collections import defaultdict
4
- from typing import TYPE_CHECKING, Any, ClassVar, Tuple
4
+ from typing import TYPE_CHECKING, Any, ClassVar
5
5
 
6
6
  from datamodel_code_generator.imports import IMPORT_TYPE_ALIAS, Import
7
7
  from datamodel_code_generator.model import DataModel, DataModelFieldBase
@@ -32,7 +32,7 @@ DEFAULT_GRAPHQL_SCALAR_TYPES: dict[str, str] = {
32
32
  class DataTypeScalar(DataModel):
33
33
  TEMPLATE_FILE_PATH: ClassVar[str] = "Scalar.jinja2"
34
34
  BASE_CLASS: ClassVar[str] = ""
35
- DEFAULT_IMPORTS: ClassVar[Tuple[Import, ...]] = (IMPORT_TYPE_ALIAS,) # noqa: UP006
35
+ DEFAULT_IMPORTS: ClassVar[tuple[Import, ...]] = (IMPORT_TYPE_ALIAS,)
36
36
 
37
37
  def __init__( # noqa: PLR0913
38
38
  self,
@@ -1,13 +1,7 @@
1
1
  from __future__ import annotations
2
2
 
3
3
  import keyword
4
- from typing import (
5
- TYPE_CHECKING,
6
- Any,
7
- ClassVar,
8
- Iterator,
9
- Tuple,
10
- )
4
+ from typing import TYPE_CHECKING, Any, ClassVar
11
5
 
12
6
  from datamodel_code_generator.model import DataModel, DataModelFieldBase
13
7
  from datamodel_code_generator.model.base import UNDEFINED
@@ -15,12 +9,12 @@ from datamodel_code_generator.model.imports import (
15
9
  IMPORT_NOT_REQUIRED,
16
10
  IMPORT_NOT_REQUIRED_BACKPORT,
17
11
  IMPORT_TYPED_DICT,
18
- IMPORT_TYPED_DICT_BACKPORT,
19
12
  )
20
13
  from datamodel_code_generator.types import NOT_REQUIRED_PREFIX
21
14
 
22
15
  if TYPE_CHECKING:
23
16
  from collections import defaultdict
17
+ from collections.abc import Iterator
24
18
  from pathlib import Path
25
19
 
26
20
  from datamodel_code_generator.reference import Reference
@@ -48,7 +42,7 @@ def _is_valid_field_name(field: DataModelFieldBase) -> bool:
48
42
  class TypedDict(DataModel):
49
43
  TEMPLATE_FILE_PATH: ClassVar[str] = "TypedDict.jinja2"
50
44
  BASE_CLASS: ClassVar[str] = "typing.TypedDict"
51
- DEFAULT_IMPORTS: ClassVar[Tuple[Import, ...]] = (IMPORT_TYPED_DICT,) # noqa: UP006
45
+ DEFAULT_IMPORTS: ClassVar[tuple[Import, ...]] = (IMPORT_TYPED_DICT,)
52
46
 
53
47
  def __init__( # noqa: PLR0913
54
48
  self,
@@ -115,13 +109,8 @@ class TypedDict(DataModel):
115
109
  )
116
110
 
117
111
 
118
- class TypedDictBackport(TypedDict):
119
- BASE_CLASS: ClassVar[str] = "typing_extensions.TypedDict"
120
- DEFAULT_IMPORTS: ClassVar[Tuple[Import, ...]] = (IMPORT_TYPED_DICT_BACKPORT,) # noqa: UP006
121
-
122
-
123
112
  class DataModelField(DataModelFieldBase):
124
- DEFAULT_IMPORTS: ClassVar[Tuple[Import, ...]] = (IMPORT_NOT_REQUIRED,) # noqa: UP006
113
+ DEFAULT_IMPORTS: ClassVar[tuple[Import, ...]] = (IMPORT_NOT_REQUIRED,)
125
114
 
126
115
  @property
127
116
  def key(self) -> str:
@@ -153,4 +142,4 @@ class DataModelField(DataModelFieldBase):
153
142
 
154
143
 
155
144
  class DataModelFieldBackport(DataModelField):
156
- DEFAULT_IMPORTS: ClassVar[Tuple[Import, ...]] = (IMPORT_NOT_REQUIRED_BACKPORT,) # noqa: UP006
145
+ DEFAULT_IMPORTS: ClassVar[tuple[Import, ...]] = (IMPORT_NOT_REQUIRED_BACKPORT,)
@@ -1,8 +1,8 @@
1
1
  from __future__ import annotations
2
2
 
3
- from typing import Any, Sequence
3
+ from typing import TYPE_CHECKING, Any
4
4
 
5
- from datamodel_code_generator import DatetimeClassType, PythonVersion
5
+ from datamodel_code_generator import DatetimeClassType, PythonVersion, PythonVersionMin
6
6
  from datamodel_code_generator.imports import (
7
7
  IMPORT_ANY,
8
8
  IMPORT_DECIMAL,
@@ -11,6 +11,9 @@ from datamodel_code_generator.imports import (
11
11
  from datamodel_code_generator.types import DataType, StrictTypes, Types
12
12
  from datamodel_code_generator.types import DataTypeManager as _DataTypeManager
13
13
 
14
+ if TYPE_CHECKING:
15
+ from collections.abc import Sequence
16
+
14
17
 
15
18
  def type_map_factory(data_type: type[DataType]) -> dict[Types, DataType]:
16
19
  data_type_int = data_type(type="int")
@@ -57,7 +60,7 @@ def type_map_factory(data_type: type[DataType]) -> dict[Types, DataType]:
57
60
  class DataTypeManager(_DataTypeManager):
58
61
  def __init__( # noqa: PLR0913, PLR0917
59
62
  self,
60
- python_version: PythonVersion = PythonVersion.PY_38,
63
+ python_version: PythonVersion = PythonVersionMin,
61
64
  use_standard_collections: bool = False, # noqa: FBT001, FBT002
62
65
  use_generic_container_types: bool = False, # noqa: FBT001, FBT002
63
66
  strict_types: Sequence[StrictTypes] | None = None,
@@ -1,6 +1,6 @@
1
1
  from __future__ import annotations
2
2
 
3
- from typing import TYPE_CHECKING, Any, ClassVar, Tuple
3
+ from typing import TYPE_CHECKING, Any, ClassVar
4
4
 
5
5
  from datamodel_code_generator.imports import IMPORT_TYPE_ALIAS, IMPORT_UNION, Import
6
6
  from datamodel_code_generator.model import DataModel, DataModelFieldBase
@@ -16,7 +16,7 @@ if TYPE_CHECKING:
16
16
  class DataTypeUnion(DataModel):
17
17
  TEMPLATE_FILE_PATH: ClassVar[str] = "Union.jinja2"
18
18
  BASE_CLASS: ClassVar[str] = ""
19
- DEFAULT_IMPORTS: ClassVar[Tuple[Import, ...]] = ( # noqa: UP006
19
+ DEFAULT_IMPORTS: ClassVar[tuple[Import, ...]] = (
20
20
  IMPORT_TYPE_ALIAS,
21
21
  IMPORT_UNION,
22
22
  )
@@ -1,7 +1,7 @@
1
1
  from __future__ import annotations
2
2
 
3
3
  from enum import Enum
4
- from typing import Callable, Dict, TypeVar
4
+ from typing import Callable, TypeVar
5
5
 
6
6
  TK = TypeVar("TK")
7
7
  TV = TypeVar("TV")
@@ -12,7 +12,7 @@ class LiteralType(Enum):
12
12
  One = "one"
13
13
 
14
14
 
15
- class DefaultPutDict(Dict[TK, TV]):
15
+ class DefaultPutDict(dict[TK, TV]):
16
16
  def get_or_put(
17
17
  self,
18
18
  key: TK,
@@ -31,4 +31,7 @@ class DefaultPutDict(Dict[TK, TV]):
31
31
  raise ValueError(msg) # pragma: no cover
32
32
 
33
33
 
34
- __all__ = ["LiteralType"]
34
+ __all__ = [
35
+ "DefaultPutDict",
36
+ "LiteralType",
37
+ ]
@@ -7,32 +7,15 @@ from abc import ABC, abstractmethod
7
7
  from collections import OrderedDict, defaultdict
8
8
  from itertools import groupby
9
9
  from pathlib import Path
10
- from typing import (
11
- Any,
12
- Callable,
13
- Dict,
14
- Iterable,
15
- Iterator,
16
- Mapping,
17
- NamedTuple,
18
- Optional,
19
- Sequence,
20
- Set,
21
- TypeVar,
22
- )
10
+ from typing import TYPE_CHECKING, Any, Callable, NamedTuple, Optional, Protocol, TypeVar, runtime_checkable
23
11
  from urllib.parse import ParseResult
24
12
 
25
13
  from pydantic import BaseModel
26
14
 
27
- from datamodel_code_generator.format import (
28
- CodeFormatter,
29
- DatetimeClassType,
30
- PythonVersion,
31
- )
15
+ from datamodel_code_generator.format import CodeFormatter, DatetimeClassType, PythonVersion, PythonVersionMin
32
16
  from datamodel_code_generator.imports import (
33
17
  IMPORT_ANNOTATIONS,
34
18
  IMPORT_LITERAL,
35
- IMPORT_LITERAL_BACKPORT,
36
19
  Import,
37
20
  Imports,
38
21
  )
@@ -52,7 +35,9 @@ from datamodel_code_generator.model.enum import Enum, Member
52
35
  from datamodel_code_generator.parser import DefaultPutDict, LiteralType
53
36
  from datamodel_code_generator.reference import ModelResolver, Reference
54
37
  from datamodel_code_generator.types import DataType, DataTypeManager, StrictTypes
55
- from datamodel_code_generator.util import Protocol, runtime_checkable
38
+
39
+ if TYPE_CHECKING:
40
+ from collections.abc import Iterable, Iterator, Mapping, Sequence
56
41
 
57
42
  SPECIAL_PATH_FORMAT: str = "#-datamodel-code-generator-#-{}-#-special-#"
58
43
 
@@ -103,8 +88,8 @@ def dump_templates(templates: list[DataModel]) -> str:
103
88
  return "\n\n\n".join(str(m) for m in templates)
104
89
 
105
90
 
106
- ReferenceMapSet = Dict[str, Set[str]]
107
- SortedDataModels = Dict[str, DataModel]
91
+ ReferenceMapSet = dict[str, set[str]]
92
+ SortedDataModels = dict[str, DataModel]
108
93
 
109
94
  MAX_RECURSION_COUNT: int = sys.getrecursionlimit()
110
95
 
@@ -322,7 +307,7 @@ class Parser(ABC):
322
307
  additional_imports: list[str] | None = None,
323
308
  custom_template_dir: Path | None = None,
324
309
  extra_template_data: defaultdict[str, dict[str, Any]] | None = None,
325
- target_python_version: PythonVersion = PythonVersion.PY_38,
310
+ target_python_version: PythonVersion = PythonVersionMin,
326
311
  dump_resolve_reference_action: Callable[[Iterable[str]], str] | None = None,
327
312
  validation: bool = False,
328
313
  field_constraints: bool = False,
@@ -811,10 +796,9 @@ class Parser(ABC):
811
796
  required=True,
812
797
  )
813
798
  )
814
- literal = IMPORT_LITERAL if self.target_python_version.has_literal_type else IMPORT_LITERAL_BACKPORT
815
- has_imported_literal = any(literal == import_ for import_ in imports)
799
+ has_imported_literal = any(import_ == IMPORT_LITERAL for import_ in imports)
816
800
  if has_imported_literal: # pragma: no cover
817
- imports.append(literal)
801
+ imports.append(IMPORT_LITERAL)
818
802
 
819
803
  @classmethod
820
804
  def _create_set_from_list(cls, data_type: DataType) -> DataType | None:
@@ -1177,7 +1161,7 @@ class Parser(ABC):
1177
1161
  ) -> str | dict[tuple[str, ...], Result]:
1178
1162
  self.parse_raw()
1179
1163
 
1180
- if with_import and self.target_python_version != PythonVersion.PY_36:
1164
+ if with_import:
1181
1165
  self.imports.append(IMPORT_ANNOTATIONS)
1182
1166
 
1183
1167
  if format_:
@@ -5,10 +5,6 @@ from typing import (
5
5
  TYPE_CHECKING,
6
6
  Any,
7
7
  Callable,
8
- Iterable,
9
- Iterator,
10
- Mapping,
11
- Sequence,
12
8
  )
13
9
  from urllib.parse import ParseResult
14
10
 
@@ -16,6 +12,7 @@ from datamodel_code_generator import (
16
12
  DefaultPutDict,
17
13
  LiteralType,
18
14
  PythonVersion,
15
+ PythonVersionMin,
19
16
  snooper_to_methods,
20
17
  )
21
18
  from datamodel_code_generator.model import DataModel, DataModelFieldBase
@@ -43,6 +40,7 @@ from datamodel_code_generator.format import DatetimeClassType
43
40
 
44
41
  if TYPE_CHECKING:
45
42
  from collections import defaultdict
43
+ from collections.abc import Iterable, Iterator, Mapping, Sequence
46
44
 
47
45
  graphql_resolver = graphql.type.introspection.TypeResolvers()
48
46
 
@@ -93,7 +91,7 @@ class GraphQLParser(Parser):
93
91
  additional_imports: list[str] | None = None,
94
92
  custom_template_dir: Path | None = None,
95
93
  extra_template_data: defaultdict[str, dict[str, Any]] | None = None,
96
- target_python_version: PythonVersion = PythonVersion.PY_38,
94
+ target_python_version: PythonVersion = PythonVersionMin,
97
95
  dump_resolve_reference_action: Callable[[Iterable[str]], str] | None = None,
98
96
  validation: bool = False,
99
97
  field_constraints: bool = False,
@@ -254,9 +252,10 @@ class GraphQLParser(Parser):
254
252
  path_parts = list(source.path.parts)
255
253
  if self.current_source_path is not None: # pragma: no cover
256
254
  self.current_source_path = source.path
257
- with self.model_resolver.current_base_path_context(
258
- source.path.parent
259
- ), self.model_resolver.current_root_context(path_parts):
255
+ with (
256
+ self.model_resolver.current_base_path_context(source.path.parent),
257
+ self.model_resolver.current_root_context(path_parts),
258
+ ):
260
259
  yield source, path_parts
261
260
 
262
261
  def _resolve_types(self, paths: list[str], schema: graphql.GraphQLSchema) -> None:
@@ -3,25 +3,9 @@ from __future__ import annotations
3
3
  import enum as _enum
4
4
  from collections import defaultdict
5
5
  from contextlib import contextmanager
6
- from functools import lru_cache
6
+ from functools import cached_property, lru_cache
7
7
  from pathlib import Path
8
- from typing import (
9
- TYPE_CHECKING,
10
- Any,
11
- Callable,
12
- ClassVar,
13
- Dict,
14
- Generator,
15
- Iterable,
16
- Iterator,
17
- List,
18
- Mapping,
19
- Optional,
20
- Sequence,
21
- Set,
22
- Type,
23
- Union,
24
- )
8
+ from typing import TYPE_CHECKING, Any, Callable, ClassVar, Optional, Union
25
9
  from urllib.parse import ParseResult
26
10
  from warnings import warn
27
11
 
@@ -35,7 +19,7 @@ from datamodel_code_generator import (
35
19
  load_yaml_from_path,
36
20
  snooper_to_methods,
37
21
  )
38
- from datamodel_code_generator.format import PythonVersion
22
+ from datamodel_code_generator.format import PythonVersion, PythonVersionMin
39
23
  from datamodel_code_generator.model import DataModel, DataModelFieldBase
40
24
  from datamodel_code_generator.model import pydantic as pydantic_model
41
25
  from datamodel_code_generator.model.base import UNDEFINED, get_module_name
@@ -61,7 +45,6 @@ from datamodel_code_generator.types import (
61
45
  from datamodel_code_generator.util import (
62
46
  PYDANTIC_V2,
63
47
  BaseModel,
64
- cached_property,
65
48
  field_validator,
66
49
  model_validator,
67
50
  )
@@ -71,6 +54,9 @@ if PYDANTIC_V2:
71
54
 
72
55
  from datamodel_code_generator.format import DatetimeClassType
73
56
 
57
+ if TYPE_CHECKING:
58
+ from collections.abc import Generator, Iterable, Iterator, Mapping, Sequence
59
+
74
60
 
75
61
  def get_model_by_path(schema: dict[str, Any] | list[Any], keys: list[str] | list[int]) -> dict[Any, Any]:
76
62
  model: dict[Any, Any] | list[Any]
@@ -149,7 +135,7 @@ class JSONReference(_enum.Enum):
149
135
 
150
136
  class Discriminator(BaseModel):
151
137
  propertyName: str # noqa: N815
152
- mapping: Optional[Dict[str, str]] = None # noqa: UP006, UP045
138
+ mapping: Optional[dict[str, str]] = None # noqa: UP045
153
139
 
154
140
 
155
141
  class JsonSchemaObject(BaseModel):
@@ -170,7 +156,7 @@ class JsonSchemaObject(BaseModel):
170
156
  def model_rebuild(cls) -> None:
171
157
  cls.update_forward_refs()
172
158
 
173
- __constraint_fields__: Set[str] = { # noqa: RUF012, UP006
159
+ __constraint_fields__: set[str] = { # noqa: RUF012
174
160
  "exclusiveMinimum",
175
161
  "minimum",
176
162
  "exclusiveMaximum",
@@ -214,9 +200,9 @@ class JsonSchemaObject(BaseModel):
214
200
  return value.replace("#", "#/")
215
201
  return value
216
202
 
217
- items: Optional[Union[List[JsonSchemaObject], JsonSchemaObject, bool]] = None # noqa: UP006, UP007, UP045
203
+ items: Optional[Union[list[JsonSchemaObject], JsonSchemaObject, bool]] = None # noqa: UP007, UP045
218
204
  uniqueItems: Optional[bool] = None # noqa: N815, UP045
219
- type: Optional[Union[str, List[str]]] = None # noqa: UP006, UP007, UP045
205
+ type: Optional[Union[str, list[str]]] = None # noqa: UP007, UP045
220
206
  format: Optional[str] = None # noqa: UP045
221
207
  pattern: Optional[str] = None # noqa: UP045
222
208
  minLength: Optional[int] = None # noqa: N815,UP045
@@ -229,18 +215,18 @@ class JsonSchemaObject(BaseModel):
229
215
  exclusiveMaximum: Optional[Union[float, bool]] = None # noqa: N815, UP007, UP045
230
216
  exclusiveMinimum: Optional[Union[float, bool]] = None # noqa: N815, UP007, UP045
231
217
  additionalProperties: Optional[Union[JsonSchemaObject, bool]] = None # noqa: N815, UP007, UP045
232
- patternProperties: Optional[Dict[str, JsonSchemaObject]] = None # noqa: N815, UP006, UP045
233
- oneOf: List[JsonSchemaObject] = [] # noqa: N815, RUF012, UP006
234
- anyOf: List[JsonSchemaObject] = [] # noqa: N815, RUF012, UP006
235
- allOf: List[JsonSchemaObject] = [] # noqa: N815, RUF012, UP006
236
- enum: List[Any] = [] # noqa: RUF012, UP006
218
+ patternProperties: Optional[dict[str, JsonSchemaObject]] = None # noqa: N815, UP045
219
+ oneOf: list[JsonSchemaObject] = [] # noqa: N815, RUF012
220
+ anyOf: list[JsonSchemaObject] = [] # noqa: N815, RUF012
221
+ allOf: list[JsonSchemaObject] = [] # noqa: N815, RUF012
222
+ enum: list[Any] = [] # noqa: RUF012
237
223
  writeOnly: Optional[bool] = None # noqa: N815, UP045
238
224
  readOnly: Optional[bool] = None # noqa: N815, UP045
239
- properties: Optional[Dict[str, Union[JsonSchemaObject, bool]]] = None # noqa: UP006, UP007, UP045
240
- required: List[str] = [] # noqa: RUF012, UP006
225
+ properties: Optional[dict[str, Union[JsonSchemaObject, bool]]] = None # noqa: UP007, UP045
226
+ required: list[str] = [] # noqa: RUF012
241
227
  ref: Optional[str] = Field(default=None, alias="$ref") # noqa: UP045
242
228
  nullable: Optional[bool] = False # noqa: UP045
243
- x_enum_varnames: List[str] = Field(default=[], alias="x-enum-varnames") # noqa: UP006
229
+ x_enum_varnames: list[str] = Field(default=[], alias="x-enum-varnames")
244
230
  description: Optional[str] = None # noqa: UP045
245
231
  title: Optional[str] = None # noqa: UP045
246
232
  example: Any = None
@@ -249,7 +235,7 @@ class JsonSchemaObject(BaseModel):
249
235
  id: Optional[str] = Field(default=None, alias="$id") # noqa: UP045
250
236
  custom_type_path: Optional[str] = Field(default=None, alias="customTypePath") # noqa: UP045
251
237
  custom_base_path: Optional[str] = Field(default=None, alias="customBasePath") # noqa: UP045
252
- extras: Dict[str, Any] = Field(alias=__extra_key__, default_factory=dict) # noqa: UP006
238
+ extras: dict[str, Any] = Field(alias=__extra_key__, default_factory=dict)
253
239
  discriminator: Optional[Union[Discriminator, str]] = None # noqa: UP007, UP045
254
240
  if PYDANTIC_V2:
255
241
  model_config = ConfigDict( # pyright: ignore[reportPossiblyUnboundVariable]
@@ -359,8 +345,8 @@ EXCLUDE_FIELD_KEYS = (
359
345
 
360
346
  @snooper_to_methods() # noqa: PLR0904
361
347
  class JsonSchemaParser(Parser):
362
- SCHEMA_PATHS: ClassVar[List[str]] = ["#/definitions", "#/$defs"] # noqa: UP006
363
- SCHEMA_OBJECT_TYPE: ClassVar[Type[JsonSchemaObject]] = JsonSchemaObject # noqa: UP006
348
+ SCHEMA_PATHS: ClassVar[list[str]] = ["#/definitions", "#/$defs"]
349
+ SCHEMA_OBJECT_TYPE: ClassVar[type[JsonSchemaObject]] = JsonSchemaObject
364
350
 
365
351
  def __init__( # noqa: PLR0913
366
352
  self,
@@ -374,7 +360,7 @@ class JsonSchemaParser(Parser):
374
360
  additional_imports: list[str] | None = None,
375
361
  custom_template_dir: Path | None = None,
376
362
  extra_template_data: defaultdict[str, dict[str, Any]] | None = None,
377
- target_python_version: PythonVersion = PythonVersion.PY_38,
363
+ target_python_version: PythonVersion = PythonVersionMin,
378
364
  dump_resolve_reference_action: Callable[[Iterable[str]], str] | None = None,
379
365
  validation: bool = False,
380
366
  field_constraints: bool = False,
@@ -1449,8 +1435,9 @@ class JsonSchemaParser(Parser):
1449
1435
  relative_path, object_path = ref.split("#")
1450
1436
  relative_paths = relative_path.split("/")
1451
1437
  base_path = Path(*relative_paths).parent
1452
- with self.model_resolver.current_base_path_context(base_path), self.model_resolver.base_url_context(
1453
- relative_path
1438
+ with (
1439
+ self.model_resolver.current_base_path_context(base_path),
1440
+ self.model_resolver.base_url_context(relative_path),
1454
1441
  ):
1455
1442
  self._parse_file(
1456
1443
  self._get_ref_body(relative_path),
@@ -1565,9 +1552,10 @@ class JsonSchemaParser(Parser):
1565
1552
  path_parts = list(source.path.parts)
1566
1553
  if self.current_source_path is not None:
1567
1554
  self.current_source_path = source.path
1568
- with self.model_resolver.current_base_path_context(
1569
- source.path.parent
1570
- ), self.model_resolver.current_root_context(path_parts):
1555
+ with (
1556
+ self.model_resolver.current_base_path_context(source.path.parent),
1557
+ self.model_resolver.current_root_context(path_parts),
1558
+ ):
1571
1559
  yield source, path_parts
1572
1560
 
1573
1561
  def parse_raw(self) -> None:
@@ -1602,9 +1590,10 @@ class JsonSchemaParser(Parser):
1602
1590
  if self.current_source_path is not None:
1603
1591
  self.current_source_path = source.path
1604
1592
 
1605
- with self.model_resolver.current_base_path_context(
1606
- source.path.parent
1607
- ), self.model_resolver.current_root_context(path_parts):
1593
+ with (
1594
+ self.model_resolver.current_base_path_context(source.path.parent),
1595
+ self.model_resolver.current_root_context(path_parts),
1596
+ ):
1608
1597
  for reserved_ref in sorted(reserved_refs):
1609
1598
  if self.model_resolver.add_ref(reserved_ref, resolved=True).loaded:
1610
1599
  continue