datamodel-code-generator 0.18.1__py3-none-any.whl → 0.20.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of datamodel-code-generator might be problematic; review the changes below for details.

Files changed (25)
  1. datamodel_code_generator/__init__.py +10 -4
  2. datamodel_code_generator/__main__.py +23 -1
  3. datamodel_code_generator/format.py +32 -2
  4. datamodel_code_generator/imports.py +1 -0
  5. datamodel_code_generator/model/__init__.py +17 -3
  6. datamodel_code_generator/model/base.py +7 -1
  7. datamodel_code_generator/model/dataclass.py +8 -2
  8. datamodel_code_generator/model/improts.py +4 -0
  9. datamodel_code_generator/model/pydantic/types.py +1 -1
  10. datamodel_code_generator/model/template/TypedDict.jinja2 +5 -0
  11. datamodel_code_generator/model/template/TypedDictClass.jinja2 +17 -0
  12. datamodel_code_generator/model/template/TypedDictFunction.jinja2 +16 -0
  13. datamodel_code_generator/model/typed_dict.py +151 -0
  14. datamodel_code_generator/model/types.py +1 -1
  15. datamodel_code_generator/parser/base.py +1 -6
  16. datamodel_code_generator/parser/jsonschema.py +99 -33
  17. datamodel_code_generator/parser/openapi.py +45 -54
  18. datamodel_code_generator/types.py +13 -4
  19. datamodel_code_generator/version.py +1 -1
  20. {datamodel_code_generator-0.18.1.dist-info → datamodel_code_generator-0.20.0.dist-info}/METADATA +12 -51
  21. datamodel_code_generator-0.20.0.dist-info/RECORD +42 -0
  22. {datamodel_code_generator-0.18.1.dist-info → datamodel_code_generator-0.20.0.dist-info}/WHEEL +1 -1
  23. datamodel_code_generator-0.18.1.dist-info/RECORD +0 -38
  24. {datamodel_code_generator-0.18.1.dist-info → datamodel_code_generator-0.20.0.dist-info}/LICENSE +0 -0
  25. {datamodel_code_generator-0.18.1.dist-info → datamodel_code_generator-0.20.0.dist-info}/entry_points.txt +0 -0
@@ -220,6 +220,7 @@ RAW_DATA_TYPES: List[InputFileType] = [
220
220
  class DataModelType(Enum):
221
221
  PydanticBaseModel = 'pydantic.BaseModel'
222
222
  DataclassesDataclass = 'dataclasses.dataclass'
223
+ TypingTypedDict = 'typing.TypedDict'
223
224
 
224
225
 
225
226
  class OpenAPIScope(Enum):
@@ -314,6 +315,7 @@ def generate(
314
315
  capitalise_enum_members: bool = False,
315
316
  keep_model_order: bool = False,
316
317
  custom_file_header: Optional[str] = None,
318
+ custom_file_header_path: Optional[Path] = None,
317
319
  ) -> None:
318
320
  remote_text_cache: DefaultPutDict[str, str] = DefaultPutDict()
319
321
  if isinstance(input_, str):
@@ -397,7 +399,7 @@ def generate(
397
399
 
398
400
  from datamodel_code_generator.model import get_data_model_types
399
401
 
400
- data_model_types = get_data_model_types(output_model_type)
402
+ data_model_types = get_data_model_types(output_model_type, target_python_version)
401
403
  parser = parser_class(
402
404
  source=input_text or input_,
403
405
  data_model_type=data_model_types.data_model,
@@ -427,7 +429,9 @@ def generate(
427
429
  use_field_description=use_field_description,
428
430
  use_default_kwarg=use_default_kwarg,
429
431
  reuse_model=reuse_model,
430
- enum_field_as_literal=enum_field_as_literal,
432
+ enum_field_as_literal=LiteralType.All
433
+ if output_model_type == DataModelType.TypingTypedDict
434
+ else enum_field_as_literal,
431
435
  use_one_literal_as_default=use_one_literal_as_default,
432
436
  set_default_enum_member=set_default_enum_member,
433
437
  use_subclass_enum=use_subclass_enum,
@@ -488,6 +492,9 @@ def generate(
488
492
 
489
493
  timestamp = datetime.now(timezone.utc).replace(microsecond=0).isoformat()
490
494
 
495
+ if custom_file_header is None and custom_file_header_path:
496
+ custom_file_header = custom_file_header_path.read_text(encoding=encoding)
497
+
491
498
  header = """\
492
499
  # generated by datamodel-codegen:
493
500
  # filename: {}"""
@@ -497,8 +504,7 @@ def generate(
497
504
  header += f'\n# version: {get_version()}'
498
505
 
499
506
  file: Optional[IO[Any]]
500
- for path, body_and_filename in modules.items():
501
- body, filename = body_and_filename
507
+ for path, (body, filename) in modules.items():
502
508
  if path is None:
503
509
  file = None
504
510
  else:
@@ -433,6 +433,15 @@ arg_parser.add_argument(
433
433
  arg_parser.add_argument(
434
434
  '--custom-file-header', help='Custom file header', type=str, default=None
435
435
  )
436
+
437
+ arg_parser.add_argument(
438
+ '--custom-file-header-path',
439
+ help='Custom file header file path',
440
+ default=None,
441
+ type=str,
442
+ )
443
+
444
+
436
445
  arg_parser.add_argument('--version', help='show version', action='store_true')
437
446
 
438
447
 
@@ -448,7 +457,9 @@ class Config(BaseModel):
448
457
  return value
449
458
  return cast(TextIOBase, Path(value).expanduser().resolve().open('rt'))
450
459
 
451
- @validator('input', 'output', 'custom_template_dir', pre=True)
460
+ @validator(
461
+ 'input', 'output', 'custom_template_dir', 'custom_file_header_path', pre=True
462
+ )
452
463
  def validate_path(cls, value: Any) -> Optional[Path]:
453
464
  if value is None or isinstance(value, Path):
454
465
  return value # pragma: no cover
@@ -488,6 +499,14 @@ class Config(BaseModel):
488
499
  )
489
500
  return values
490
501
 
502
+ @root_validator
503
+ def validate_custom_file_header(cls, values: Dict[str, Any]) -> Dict[str, Any]:
504
+ if values.get('custom_file_header') and values.get('custom_file_header_path'):
505
+ raise Error(
506
+ '`--custom_file_header_path` can not be used with `--custom_file_header`.'
507
+ ) # pragma: no cover
508
+ return values
509
+
491
510
  # Pydantic 1.5.1 doesn't support each_item=True correctly
492
511
  @validator('http_headers', pre=True)
493
512
  def validate_http_headers(cls, value: Any) -> Optional[List[Tuple[str, str]]]:
@@ -583,6 +602,7 @@ class Config(BaseModel):
583
602
  capitalise_enum_members: bool = False
584
603
  keep_model_order: bool = False
585
604
  custom_file_header: Optional[str] = None
605
+ custom_file_header_path: Optional[Path] = None
586
606
 
587
607
  def merge_args(self, args: Namespace) -> None:
588
608
  set_args = {
@@ -742,6 +762,8 @@ def main(args: Optional[Sequence[str]] = None) -> Exit:
742
762
  remove_special_field_name_prefix=config.remove_special_field_name_prefix,
743
763
  capitalise_enum_members=config.capitalise_enum_members,
744
764
  keep_model_order=config.keep_model_order,
765
+ custom_file_header=config.custom_file_header,
766
+ custom_file_header_path=config.custom_file_header_path,
745
767
  )
746
768
  return Exit.OK
747
769
  except InvalidClassNameError as e:
@@ -9,6 +9,8 @@ import black
9
9
  import isort
10
10
  import toml
11
11
 
12
+ from datamodel_code_generator import cached_property
13
+
12
14
 
13
15
  class PythonVersion(Enum):
14
16
  PY_36 = '3.6'
@@ -18,13 +20,41 @@ class PythonVersion(Enum):
18
20
  PY_310 = '3.10'
19
21
  PY_311 = '3.11'
20
22
 
23
+ @cached_property
24
+ def _is_py_38_or_later(self) -> bool: # pragma: no cover
25
+ return self.value not in {self.PY_36.value, self.PY_37.value} # type: ignore
26
+
27
+ @cached_property
28
+ def _is_py_39_or_later(self) -> bool: # pragma: no cover
29
+ return self.value not in {self.PY_36.value, self.PY_37.value, self.PY_38.value} # type: ignore
30
+
31
+ @cached_property
32
+ def _is_py_310_or_later(self) -> bool: # pragma: no cover
33
+ return self.value not in {self.PY_36.value, self.PY_37.value, self.PY_38.value, self.PY_39.value} # type: ignore
34
+
35
+ @cached_property
36
+ def _is_py_311_or_later(self) -> bool: # pragma: no cover
37
+ return self.value not in {self.PY_36.value, self.PY_37.value, self.PY_38.value, self.PY_39.value, self.PY_310.value} # type: ignore
38
+
21
39
  @property
22
40
  def has_literal_type(self) -> bool:
23
- return self.value not in {self.PY_36.value, self.PY_37.value} # type: ignore
41
+ return self._is_py_38_or_later
24
42
 
25
43
  @property
26
44
  def has_union_operator(self) -> bool: # pragma: no cover
27
- return self.value not in {self.PY_36.value, self.PY_37.value, self.PY_38.value, self.PY_39.value} # type: ignore
45
+ return self._is_py_310_or_later
46
+
47
+ @property
48
+ def has_annotated_type(self) -> bool:
49
+ return self._is_py_39_or_later
50
+
51
+ @property
52
+ def has_typed_dict(self) -> bool:
53
+ return self._is_py_38_or_later
54
+
55
+ @property
56
+ def has_typed_dict_non_required(self) -> bool:
57
+ return self._is_py_311_or_later
28
58
 
29
59
 
30
60
  if TYPE_CHECKING:
@@ -61,6 +61,7 @@ class Imports(DefaultDict[Optional[str], Set[str]]):
61
61
 
62
62
 
63
63
  IMPORT_ANNOTATED = Import.from_full_path('typing.Annotated')
64
+ IMPORT_ANNOTATED_BACKPORT = Import.from_full_path('typing_extensions.Annotated')
64
65
  IMPORT_ANY = Import.from_full_path('typing.Any')
65
66
  IMPORT_LIST = Import.from_full_path('typing.List')
66
67
  IMPORT_UNION = Import.from_full_path('typing.Union')
@@ -6,7 +6,7 @@ from ..types import DataTypeManager as DataTypeManagerABC
6
6
  from .base import ConstraintsBase, DataModel, DataModelFieldBase
7
7
 
8
8
  if TYPE_CHECKING:
9
- from .. import DataModelType
9
+ from .. import DataModelType, PythonVersion
10
10
 
11
11
 
12
12
  class DataModelSet(NamedTuple):
@@ -17,9 +17,11 @@ class DataModelSet(NamedTuple):
17
17
  dump_resolve_reference_action: Optional[Callable[[Iterable[str]], str]]
18
18
 
19
19
 
20
- def get_data_model_types(data_model_type: DataModelType) -> DataModelSet:
20
+ def get_data_model_types(
21
+ data_model_type: DataModelType, target_python_version: PythonVersion
22
+ ) -> DataModelSet:
21
23
  from .. import DataModelType
22
- from . import dataclass, pydantic, rootmodel
24
+ from . import dataclass, pydantic, rootmodel, typed_dict
23
25
  from .types import DataTypeManager
24
26
 
25
27
  if data_model_type == DataModelType.PydanticBaseModel:
@@ -38,6 +40,18 @@ def get_data_model_types(data_model_type: DataModelType) -> DataModelSet:
38
40
  data_type_manager=DataTypeManager,
39
41
  dump_resolve_reference_action=None,
40
42
  )
43
+ elif data_model_type == DataModelType.TypingTypedDict:
44
+ return DataModelSet(
45
+ data_model=typed_dict.TypedDict
46
+ if target_python_version.has_typed_dict
47
+ else typed_dict.TypedDictBackport,
48
+ root_model=rootmodel.RootModel,
49
+ field_model=typed_dict.DataModelField
50
+ if target_python_version.has_typed_dict_non_required
51
+ else typed_dict.DataModelFieldBackport,
52
+ data_type_manager=DataTypeManager,
53
+ dump_resolve_reference_action=None,
54
+ )
41
55
  raise ValueError(
42
56
  f'{data_model_type} is unsupported data model type'
43
57
  ) # pragma: no cover
@@ -25,6 +25,7 @@ from jinja2 import Environment, FileSystemLoader, Template
25
25
  from datamodel_code_generator import cached_property
26
26
  from datamodel_code_generator.imports import (
27
27
  IMPORT_ANNOTATED,
28
+ IMPORT_ANNOTATED_BACKPORT,
28
29
  IMPORT_OPTIONAL,
29
30
  IMPORT_UNION,
30
31
  Import,
@@ -125,7 +126,12 @@ class DataModelFieldBase(_BaseModel):
125
126
  ) and not self.data_type.use_union_operator:
126
127
  imports.append((IMPORT_OPTIONAL,))
127
128
  if self.use_annotated:
128
- imports.append((IMPORT_ANNOTATED,))
129
+ import_annotated = (
130
+ IMPORT_ANNOTATED
131
+ if self.data_type.python_version.has_annotated_type
132
+ else IMPORT_ANNOTATED_BACKPORT
133
+ )
134
+ imports.append((import_annotated,))
129
135
  return chain_as_tuple(*imports)
130
136
 
131
137
  @property
@@ -12,6 +12,13 @@ from datamodel_code_generator.reference import Reference
12
12
  from datamodel_code_generator.types import chain_as_tuple
13
13
 
14
14
 
15
+ def _has_field_assignment(field: DataModelFieldBase) -> bool:
16
+ return bool(field.field) or not (
17
+ field.required
18
+ or (field.represented_default == 'None' and field.strip_default_none)
19
+ )
20
+
21
+
15
22
  class DataClass(DataModel):
16
23
  TEMPLATE_FILE_PATH: ClassVar[str] = 'dataclass.jinja2'
17
24
  DEFAULT_IMPORTS: ClassVar[Tuple[Import, ...]] = (IMPORT_DATACLASS,)
@@ -34,7 +41,7 @@ class DataClass(DataModel):
34
41
  ) -> None:
35
42
  super().__init__(
36
43
  reference=reference,
37
- fields=fields,
44
+ fields=sorted(fields, key=_has_field_assignment, reverse=False),
38
45
  decorators=decorators,
39
46
  base_classes=base_classes,
40
47
  custom_base_class=custom_base_class,
@@ -46,7 +53,6 @@ class DataClass(DataModel):
46
53
  default=default,
47
54
  nullable=nullable,
48
55
  )
49
- self.fields.sort(key=str, reverse=False)
50
56
 
51
57
  @property
52
58
  def imports(self) -> Tuple[Import, ...]:
@@ -2,3 +2,7 @@ from datamodel_code_generator.imports import Import
2
2
 
3
3
  IMPORT_DATACLASS = Import.from_full_path('dataclasses.dataclass')
4
4
  IMPORT_FIELD = Import.from_full_path('dataclasses.field')
5
+ IMPORT_TYPED_DICT = Import.from_full_path('typing.TypedDict')
6
+ IMPORT_TYPED_DICT_BACKPORT = Import.from_full_path('typing_extensions.TypedDict')
7
+ IMPORT_NOT_REQUIRED = Import.from_full_path('typing.NotRequired')
8
+ IMPORT_NOT_REQUIRED_BACKPORT = Import.from_full_path('typing_extensions.NotRequired')
@@ -93,7 +93,7 @@ def type_map_factory(
93
93
  Types.ipv6_network: data_type.from_import(IMPORT_IPV6NETWORKS),
94
94
  Types.boolean: data_type(type='bool'),
95
95
  Types.object: data_type.from_import(IMPORT_ANY, is_dict=True),
96
- Types.null: data_type.from_import(IMPORT_ANY, is_optional=True),
96
+ Types.null: data_type(type='None'),
97
97
  Types.array: data_type.from_import(IMPORT_ANY, is_list=True),
98
98
  Types.any: data_type.from_import(IMPORT_ANY),
99
99
  }
@@ -0,0 +1,5 @@
1
+ {%- if is_functional_syntax %}
2
+ {% include 'TypedDictFunction.jinja2' %}
3
+ {%- else %}
4
+ {% include 'TypedDictClass.jinja2' %}
5
+ {%- endif %}
@@ -0,0 +1,17 @@
1
+ class {{ class_name }}({{ base_class }}):
2
+ {%- if description %}
3
+ """
4
+ {{ description | indent(4) }}
5
+ """
6
+ {%- endif %}
7
+ {%- if not fields and not description %}
8
+ pass
9
+ {%- endif %}
10
+ {%- for field in fields %}
11
+ {{ field.name }}: {{ field.type_hint }}
12
+ {%- if field.docstring %}
13
+ """
14
+ {{ field.docstring | indent(4) }}
15
+ """
16
+ {%- endif %}
17
+ {%- endfor -%}
@@ -0,0 +1,16 @@
1
+ {%- if description %}
2
+ """
3
+ {{ description | indent(4) }}
4
+ """
5
+ {%- endif %}
6
+ {{ class_name }} = TypedDict('{{ class_name }}', {
7
+ {%- for field in all_fields %}
8
+ '{{ field.key }}': {{ field.type_hint }},
9
+ {%- if field.docstring %}
10
+ """
11
+ {{ field.docstring | indent(4) }}
12
+ """
13
+ {%- endif %}
14
+ {%- endfor -%}
15
+ })
16
+
@@ -0,0 +1,151 @@
1
+ from __future__ import annotations
2
+
3
+ import keyword
4
+ from pathlib import Path
5
+ from typing import (
6
+ Any,
7
+ ClassVar,
8
+ DefaultDict,
9
+ Dict,
10
+ Iterator,
11
+ List,
12
+ Optional,
13
+ Tuple,
14
+ )
15
+
16
+ from datamodel_code_generator.imports import Import
17
+ from datamodel_code_generator.model import DataModel, DataModelFieldBase
18
+ from datamodel_code_generator.model.base import UNDEFINED
19
+ from datamodel_code_generator.model.improts import (
20
+ IMPORT_NOT_REQUIRED,
21
+ IMPORT_NOT_REQUIRED_BACKPORT,
22
+ IMPORT_TYPED_DICT,
23
+ IMPORT_TYPED_DICT_BACKPORT,
24
+ )
25
+ from datamodel_code_generator.reference import Reference
26
+ from datamodel_code_generator.types import NOT_REQUIRED_PREFIX
27
+
28
+ escape_characters = str.maketrans(
29
+ {
30
+ '\\': r'\\',
31
+ "'": r"\'",
32
+ '\b': r'\b',
33
+ '\f': r'\f',
34
+ '\n': r'\n',
35
+ '\r': r'\r',
36
+ '\t': r'\t',
37
+ }
38
+ )
39
+
40
+
41
+ def _is_valid_field_name(field: DataModelFieldBase) -> bool:
42
+ name = field.original_name or field.name
43
+ if name is None: # pragma: no cover
44
+ return False
45
+ return name.isidentifier() and not keyword.iskeyword(name)
46
+
47
+
48
+ class TypedDict(DataModel):
49
+ TEMPLATE_FILE_PATH: ClassVar[str] = 'TypedDict.jinja2'
50
+ BASE_CLASS: ClassVar[str] = 'typing.TypedDict'
51
+ DEFAULT_IMPORTS: ClassVar[Tuple[Import, ...]] = (IMPORT_TYPED_DICT,)
52
+
53
+ def __init__(
54
+ self,
55
+ *,
56
+ reference: Reference,
57
+ fields: List[DataModelFieldBase],
58
+ decorators: Optional[List[str]] = None,
59
+ base_classes: Optional[List[Reference]] = None,
60
+ custom_base_class: Optional[str] = None,
61
+ custom_template_dir: Optional[Path] = None,
62
+ extra_template_data: Optional[DefaultDict[str, Dict[str, Any]]] = None,
63
+ methods: Optional[List[str]] = None,
64
+ path: Optional[Path] = None,
65
+ description: Optional[str] = None,
66
+ default: Any = UNDEFINED,
67
+ nullable: bool = False,
68
+ ) -> None:
69
+ super().__init__(
70
+ reference=reference,
71
+ fields=fields,
72
+ decorators=decorators,
73
+ base_classes=base_classes,
74
+ custom_base_class=custom_base_class,
75
+ custom_template_dir=custom_template_dir,
76
+ extra_template_data=extra_template_data,
77
+ methods=methods,
78
+ path=path,
79
+ description=description,
80
+ default=default,
81
+ nullable=nullable,
82
+ )
83
+
84
+ @property
85
+ def is_functional_syntax(self) -> bool:
86
+ return any(not _is_valid_field_name(f) for f in self.fields)
87
+
88
+ @property
89
+ def all_fields(self) -> Iterator[DataModelFieldBase]:
90
+ for base_class in self.base_classes:
91
+ if base_class.reference is None: # pragma: no cover
92
+ continue
93
+ data_model = base_class.reference.source
94
+ if not isinstance(data_model, DataModel): # pragma: no cover
95
+ continue
96
+
97
+ if isinstance(data_model, TypedDict): # pragma: no cover
98
+ yield from data_model.all_fields
99
+
100
+ yield from self.fields
101
+
102
+ def render(self, *, class_name: Optional[str] = None) -> str:
103
+ response = self._render(
104
+ class_name=class_name or self.class_name,
105
+ fields=self.fields,
106
+ decorators=self.decorators,
107
+ base_class=self.base_class,
108
+ methods=self.methods,
109
+ description=self.description,
110
+ is_functional_syntax=self.is_functional_syntax,
111
+ all_fields=self.all_fields,
112
+ **self.extra_template_data,
113
+ )
114
+ return response
115
+
116
+
117
+ class TypedDictBackport(TypedDict):
118
+ BASE_CLASS: ClassVar[str] = 'typing_extensions.TypedDict'
119
+ DEFAULT_IMPORTS: ClassVar[Tuple[Import, ...]] = (IMPORT_TYPED_DICT_BACKPORT,)
120
+
121
+
122
+ class DataModelField(DataModelFieldBase):
123
+ DEFAULT_IMPORTS: ClassVar[Tuple[Import, ...]] = (IMPORT_NOT_REQUIRED,)
124
+
125
+ @property
126
+ def key(self) -> str:
127
+ return (self.original_name or self.name or '').translate( # pragma: no cover
128
+ escape_characters
129
+ )
130
+
131
+ @property
132
+ def type_hint(self) -> str:
133
+ type_hint = super().type_hint
134
+ if self._not_required:
135
+ return f'{NOT_REQUIRED_PREFIX}{type_hint}]'
136
+ return type_hint
137
+
138
+ @property
139
+ def _not_required(self) -> bool:
140
+ return not self.required and isinstance(self.parent, TypedDict)
141
+
142
+ @property
143
+ def imports(self) -> Tuple[Import, ...]:
144
+ return (
145
+ *super().imports,
146
+ *(self.DEFAULT_IMPORTS if self._not_required else ()),
147
+ )
148
+
149
+
150
+ class DataModelFieldBackport(DataModelField):
151
+ DEFAULT_IMPORTS: ClassVar[Tuple[Import, ...]] = (IMPORT_NOT_REQUIRED_BACKPORT,)
@@ -43,7 +43,7 @@ def type_map_factory(
43
43
  Types.ipv6_network: data_type_str,
44
44
  Types.boolean: data_type(type='bool'),
45
45
  Types.object: data_type.from_import(IMPORT_ANY, is_dict=True),
46
- Types.null: data_type.from_import(IMPORT_ANY, is_optional=True),
46
+ Types.null: data_type(type='None'),
47
47
  Types.array: data_type.from_import(IMPORT_ANY, is_list=True),
48
48
  Types.any: data_type.from_import(IMPORT_ANY),
49
49
  }
@@ -485,12 +485,7 @@ class Parser(ABC):
485
485
  yield Source(path=Path(), text=self.source)
486
486
  elif isinstance(self.source, Path): # pragma: no cover
487
487
  if self.source.is_dir():
488
- paths = (
489
- sorted(self.source.rglob('*'))
490
- if self.keep_model_order
491
- else self.source.rglob('*')
492
- )
493
- for path in paths:
488
+ for path in sorted(self.source.rglob('*'), key=lambda p: p.name):
494
489
  if path.is_file():
495
490
  yield Source.from_path(path, self.base_path, self.encoding)
496
491
  else:
@@ -14,6 +14,7 @@ from typing import (
14
14
  Dict,
15
15
  Generator,
16
16
  Iterable,
17
+ Iterator,
17
18
  List,
18
19
  Mapping,
19
20
  Optional,
@@ -43,6 +44,7 @@ from datamodel_code_generator.model.enum import Enum
43
44
  from datamodel_code_generator.parser import DefaultPutDict, LiteralType
44
45
  from datamodel_code_generator.parser.base import (
45
46
  Parser,
47
+ Source,
46
48
  escape_characters,
47
49
  title_to_class_name,
48
50
  )
@@ -224,6 +226,7 @@ class JsonSchemaObject(BaseModel):
224
226
  default: Any
225
227
  id: Optional[str] = Field(default=None, alias='$id')
226
228
  custom_type_path: Optional[str] = Field(default=None, alias='customTypePath')
229
+ custom_base_path: Optional[str] = Field(default=None, alias='customBasePath')
227
230
  extras: Dict[str, Any] = Field(alias=__extra_key__, default_factory=dict)
228
231
  discriminator: Union[Discriminator, str, None]
229
232
 
@@ -661,7 +664,7 @@ class JsonSchemaParser(Parser):
661
664
  reference=reference,
662
665
  fields=fields,
663
666
  base_classes=base_classes,
664
- custom_base_class=self.base_class,
667
+ custom_base_class=obj.custom_base_path or self.base_class,
665
668
  custom_template_dir=self.custom_template_dir,
666
669
  extra_template_data=self.extra_template_data,
667
670
  path=self.current_source_path,
@@ -679,6 +682,7 @@ class JsonSchemaParser(Parser):
679
682
  fields: List[DataModelFieldBase],
680
683
  base_classes: List[Reference],
681
684
  required: List[str],
685
+ union_models: List[Reference],
682
686
  ) -> None:
683
687
  for all_of_item in obj.allOf:
684
688
  if all_of_item.ref: # $ref
@@ -697,8 +701,26 @@ class JsonSchemaParser(Parser):
697
701
  if all_of_item.required:
698
702
  required.extend(all_of_item.required)
699
703
  self._parse_all_of_item(
700
- name, all_of_item, path, fields, base_classes, required
704
+ name,
705
+ all_of_item,
706
+ path,
707
+ fields,
708
+ base_classes,
709
+ required,
710
+ union_models,
701
711
  )
712
+ if all_of_item.anyOf:
713
+ union_models.extend(
714
+ d.reference
715
+ for d in self.parse_any_of(name, all_of_item, path)
716
+ if d.reference
717
+ )
718
+ if all_of_item.oneOf:
719
+ union_models.extend(
720
+ d.reference
721
+ for d in self.parse_one_of(name, all_of_item, path)
722
+ if d.reference
723
+ )
702
724
 
703
725
  def parse_all_of(
704
726
  self,
@@ -717,10 +739,58 @@ class JsonSchemaParser(Parser):
717
739
  fields: List[DataModelFieldBase] = []
718
740
  base_classes: List[Reference] = []
719
741
  required: List[str] = []
720
- self._parse_all_of_item(name, obj, path, fields, base_classes, required)
721
- return self._parse_object_common_part(
722
- name, obj, path, ignore_duplicate_model, fields, base_classes, required
742
+ union_models: List[Reference] = []
743
+ self._parse_all_of_item(
744
+ name, obj, path, fields, base_classes, required, union_models
745
+ )
746
+ if not union_models:
747
+ return self._parse_object_common_part(
748
+ name, obj, path, ignore_duplicate_model, fields, base_classes, required
749
+ )
750
+ reference = self.model_resolver.add(path, name, class_name=True, loaded=True)
751
+ all_of_data_type = self._parse_object_common_part(
752
+ name,
753
+ obj,
754
+ get_special_path('allOf', path),
755
+ ignore_duplicate_model,
756
+ fields,
757
+ base_classes,
758
+ required,
759
+ )
760
+ data_type = self.data_type(
761
+ data_types=[
762
+ self._parse_object_common_part(
763
+ name,
764
+ obj,
765
+ get_special_path(f'union_model-{index}', path),
766
+ ignore_duplicate_model,
767
+ [],
768
+ [union_model, all_of_data_type.reference], # type: ignore
769
+ [],
770
+ )
771
+ for index, union_model in enumerate(union_models)
772
+ ]
773
+ )
774
+ field = self.get_object_field(
775
+ field_name=None,
776
+ field=obj,
777
+ required=True,
778
+ field_type=data_type,
779
+ alias=None,
780
+ original_field_name=None,
781
+ )
782
+ data_model_root = self.data_model_root_type(
783
+ reference=reference,
784
+ fields=[field],
785
+ custom_base_class=obj.custom_base_path or self.base_class,
786
+ custom_template_dir=self.custom_template_dir,
787
+ extra_template_data=self.extra_template_data,
788
+ path=self.current_source_path,
789
+ description=obj.description if self.use_schema_description else None,
790
+ nullable=obj.type_has_null,
723
791
  )
792
+ self.results.append(data_model_root)
793
+ return self.data_type(reference=reference)
724
794
 
725
795
  def parse_object_fields(
726
796
  self, obj: JsonSchemaObject, path: List[str], module_name: Optional[str] = None
@@ -834,7 +904,7 @@ class JsonSchemaParser(Parser):
834
904
  data_model_type = data_model_type_class(
835
905
  reference=reference,
836
906
  fields=fields,
837
- custom_base_class=self.base_class,
907
+ custom_base_class=obj.custom_base_path or self.base_class,
838
908
  custom_template_dir=self.custom_template_dir,
839
909
  extra_template_data=self.extra_template_data,
840
910
  path=self.current_source_path,
@@ -1084,7 +1154,7 @@ class JsonSchemaParser(Parser):
1084
1154
  data_model_root = self.data_model_root_type(
1085
1155
  reference=reference,
1086
1156
  fields=[field],
1087
- custom_base_class=self.base_class,
1157
+ custom_base_class=obj.custom_base_path or self.base_class,
1088
1158
  custom_template_dir=self.custom_template_dir,
1089
1159
  extra_template_data=self.extra_template_data,
1090
1160
  path=self.current_source_path,
@@ -1164,7 +1234,7 @@ class JsonSchemaParser(Parser):
1164
1234
  has_default=obj.has_default,
1165
1235
  )
1166
1236
  ],
1167
- custom_base_class=self.base_class,
1237
+ custom_base_class=obj.custom_base_path or self.base_class,
1168
1238
  custom_template_dir=self.custom_template_dir,
1169
1239
  extra_template_data=self.extra_template_data,
1170
1240
  path=self.current_source_path,
@@ -1297,7 +1367,7 @@ class JsonSchemaParser(Parser):
1297
1367
  original_name=None,
1298
1368
  )
1299
1369
  ],
1300
- custom_base_class=self.base_class,
1370
+ custom_base_class=obj.custom_base_path or self.base_class,
1301
1371
  custom_template_dir=self.custom_template_dir,
1302
1372
  extra_template_data=self.extra_template_data,
1303
1373
  path=self.current_source_path,
@@ -1458,7 +1528,7 @@ class JsonSchemaParser(Parser):
1458
1528
  self.parse_root_type(name, obj, path)
1459
1529
  self.parse_ref(obj, path)
1460
1530
 
1461
- def parse_raw(self) -> None:
1531
+ def _get_context_source_path_parts(self) -> Iterator[Tuple[Source, List[str]]]:
1462
1532
  if isinstance(self.source, list) or (
1463
1533
  isinstance(self.source, Path) and self.source.is_dir()
1464
1534
  ):
@@ -1478,26 +1548,28 @@ class JsonSchemaParser(Parser):
1478
1548
  with self.model_resolver.current_base_path_context(
1479
1549
  source.path.parent
1480
1550
  ), self.model_resolver.current_root_context(path_parts):
1481
- self.raw_obj = load_yaml(source.text)
1482
- if self.custom_class_name_generator:
1483
- obj_name = self.raw_obj.get('title', 'Model')
1551
+ yield source, path_parts
1552
+
1553
+ def parse_raw(self) -> None:
1554
+ for source, path_parts in self._get_context_source_path_parts():
1555
+ self.raw_obj = load_yaml(source.text)
1556
+ if self.custom_class_name_generator:
1557
+ obj_name = self.raw_obj.get('title', 'Model')
1558
+ else:
1559
+ if self.class_name:
1560
+ obj_name = self.class_name
1484
1561
  else:
1485
- if self.class_name:
1486
- obj_name = self.class_name
1487
- else:
1488
- # backward compatible
1489
- obj_name = self.raw_obj.get('title', 'Model')
1490
- if not self.model_resolver.validate_name(obj_name):
1491
- obj_name = title_to_class_name(obj_name)
1562
+ # backward compatible
1563
+ obj_name = self.raw_obj.get('title', 'Model')
1492
1564
  if not self.model_resolver.validate_name(obj_name):
1493
- raise InvalidClassNameError(obj_name)
1494
- self._parse_file(self.raw_obj, obj_name, path_parts)
1565
+ obj_name = title_to_class_name(obj_name)
1566
+ if not self.model_resolver.validate_name(obj_name):
1567
+ raise InvalidClassNameError(obj_name)
1568
+ self._parse_file(self.raw_obj, obj_name, path_parts)
1495
1569
 
1496
1570
  self._resolve_unparsed_json_pointer()
1497
1571
 
1498
- def _resolve_unparsed_json_pointer(
1499
- self, exclude_path_prefixes: Optional[List[str]] = None
1500
- ) -> None:
1572
+ def _resolve_unparsed_json_pointer(self) -> None:
1501
1573
  model_count: int = len(self.results)
1502
1574
  for source in self.iter_source:
1503
1575
  path_parts = list(source.path.parts)
@@ -1511,14 +1583,6 @@ class JsonSchemaParser(Parser):
1511
1583
  source.path.parent
1512
1584
  ), self.model_resolver.current_root_context(path_parts):
1513
1585
  for reserved_ref in sorted(reserved_refs):
1514
- if exclude_path_prefixes:
1515
- reserved_ref_path_prefix = reserved_ref.split('#/', 1)[-1]
1516
- if any(
1517
- e
1518
- for e in exclude_path_prefixes
1519
- if reserved_ref_path_prefix.startswith(e)
1520
- ): # pragma: no cover
1521
- continue
1522
1586
  if self.model_resolver.add_ref(reserved_ref, resolved=True).loaded:
1523
1587
  continue
1524
1588
  # for root model
@@ -1560,6 +1624,8 @@ class JsonSchemaParser(Parser):
1560
1624
  path, obj_name, unique=False, class_name=True
1561
1625
  ).name
1562
1626
  with self.root_id_context(raw):
1627
+ # Some jsonschema docs include attribute self to have include version details
1628
+ raw.pop('self', None)
1563
1629
  # parse $id before parsing $ref
1564
1630
  root_obj = JsonSchemaObject.parse_obj(raw)
1565
1631
  self.parse_id(root_obj, path_parts)
@@ -301,10 +301,7 @@ class OpenAPIParser(JsonSchemaParser):
301
301
  path: List[str],
302
302
  ) -> DataType:
303
303
  if obj.is_array:
304
- data_type: DataType = self.parse_array_fields(
305
- name, obj, [*path, name], False
306
- ).data_type
307
- # TODO: The List model is not created by this method. Some scenarios may necessitate it.
304
+ data_type = self.parse_array(name, obj, [*path, name])
308
305
  elif obj.allOf: # pragma: no cover
309
306
  data_type = self.parse_all_of(name, obj, path)
310
307
  elif obj.oneOf or obj.anyOf: # pragma: no cover
@@ -522,7 +519,7 @@ class OpenAPIParser(JsonSchemaParser):
522
519
  )
523
520
 
524
521
  def parse_raw(self) -> None:
525
- for source in self.iter_source:
522
+ for source, path_parts in self._get_context_source_path_parts():
526
523
  if self.validation:
527
524
  from prance import BaseParser
528
525
 
@@ -539,55 +536,49 @@ class OpenAPIParser(JsonSchemaParser):
539
536
  security: Optional[List[Dict[str, List[str]]]] = specification.get(
540
537
  'security'
541
538
  )
542
- if isinstance(self.source, ParseResult):
543
- path_parts: List[str] = self.get_url_path_parts(self.source)
544
- else:
545
- path_parts = list(source.path.parts)
546
- with self.model_resolver.current_root_context(path_parts):
547
- if OpenAPIScope.Schemas in self.open_api_scopes:
548
- for (
539
+ if OpenAPIScope.Schemas in self.open_api_scopes:
540
+ for (
541
+ obj_name,
542
+ raw_obj,
543
+ ) in schemas.items(): # type: str, Dict[Any, Any]
544
+ self.parse_raw_obj(
549
545
  obj_name,
550
546
  raw_obj,
551
- ) in schemas.items(): # type: str, Dict[Any, Any]
552
- self.parse_raw_obj(
553
- obj_name,
554
- raw_obj,
555
- [*path_parts, '#/components', 'schemas', obj_name],
547
+ [*path_parts, '#/components', 'schemas', obj_name],
548
+ )
549
+ if OpenAPIScope.Paths in self.open_api_scopes:
550
+ paths: Dict[str, Dict[str, Any]] = specification.get('paths', {})
551
+ parameters: List[Dict[str, Any]] = [
552
+ self._get_ref_body(p['$ref']) if '$ref' in p else p
553
+ for p in paths.get('parameters', [])
554
+ if isinstance(p, dict)
555
+ ]
556
+ paths_path = [*path_parts, '#/paths']
557
+ for path_name, methods in paths.items():
558
+ # Resolve path items if applicable
559
+ if '$ref' in methods:
560
+ methods = self.get_ref_model(methods['$ref'])
561
+ paths_parameters = parameters[:]
562
+ if 'parameters' in methods:
563
+ paths_parameters.extend(methods['parameters'])
564
+ relative_path_name = path_name[1:]
565
+ if relative_path_name:
566
+ path = [*paths_path, relative_path_name]
567
+ else: # pragma: no cover
568
+ path = get_special_path('root', paths_path)
569
+ for operation_name, raw_operation in methods.items():
570
+ if operation_name not in OPERATION_NAMES:
571
+ continue
572
+ if paths_parameters:
573
+ if 'parameters' in raw_operation: # pragma: no cover
574
+ raw_operation['parameters'].extend(paths_parameters)
575
+ else:
576
+ raw_operation['parameters'] = paths_parameters
577
+ if security is not None and 'security' not in raw_operation:
578
+ raw_operation['security'] = security
579
+ self.parse_operation(
580
+ raw_operation,
581
+ [*path, operation_name],
556
582
  )
557
- if OpenAPIScope.Paths in self.open_api_scopes:
558
- paths: Dict[str, Dict[str, Any]] = specification.get('paths', {})
559
- parameters: List[Dict[str, Any]] = [
560
- self._get_ref_body(p['$ref']) if '$ref' in p else p
561
- for p in paths.get('parameters', [])
562
- if isinstance(p, dict)
563
- ]
564
- paths_path = [*path_parts, '#/paths']
565
- for path_name, methods in paths.items():
566
- paths_parameters = parameters[:]
567
- if 'parameters' in methods:
568
- paths_parameters.extend(methods['parameters'])
569
- relative_path_name = path_name[1:]
570
- if relative_path_name:
571
- path = [*paths_path, relative_path_name]
572
- else: # pragma: no cover
573
- path = get_special_path('root', paths_path)
574
- for operation_name, raw_operation in methods.items():
575
- if operation_name not in OPERATION_NAMES:
576
- continue
577
- if paths_parameters:
578
- if 'parameters' in raw_operation: # pragma: no cover
579
- raw_operation['parameters'].extend(paths_parameters)
580
- else:
581
- raw_operation['parameters'] = paths_parameters
582
- if security is not None and 'security' not in raw_operation:
583
- raw_operation['security'] = security
584
- self.parse_operation(
585
- raw_operation,
586
- [*path, operation_name],
587
- )
588
-
589
- if OpenAPIScope.Schemas not in self.open_api_scopes:
590
- exclude_path_prefixes: Optional[List[str]] = ['components/schemas/']
591
- else:
592
- exclude_path_prefixes = None
593
- self._resolve_unparsed_json_pointer(exclude_path_prefixes)
583
+
584
+ self._resolve_unparsed_json_pointer()
@@ -68,6 +68,9 @@ STANDARD_DICT = 'dict'
68
68
  STANDARD_LIST = 'list'
69
69
  STR = 'str'
70
70
 
71
+ NOT_REQUIRED = 'NotRequired'
72
+ NOT_REQUIRED_PREFIX = f'{NOT_REQUIRED}['
73
+
71
74
 
72
75
  class StrictTypes(Enum):
73
76
  str = 'str'
@@ -344,7 +347,7 @@ class DataType(_BaseModel):
344
347
  for t in self.data_types
345
348
  if not (t.type == ANY and t.is_optional)
346
349
  ]
347
- break
350
+ break # pragma: no cover
348
351
 
349
352
  for data_type in self.data_types:
350
353
  if data_type.reference or data_type.data_types:
@@ -364,10 +367,16 @@ class DataType(_BaseModel):
364
367
  if data_type_type in data_types: # pragma: no cover
365
368
  continue
366
369
  data_types.append(data_type_type)
367
- if self.use_union_operator:
368
- type_ = UNION_OPERATOR_DELIMITER.join(data_types)
370
+ if NONE in data_types:
371
+ data_types = [d for d in data_types if d != NONE]
372
+ self.is_optional = True
373
+ if len(data_types) == 1:
374
+ type_ = data_types[0]
369
375
  else:
370
- type_ = f'{UNION_PREFIX}{UNION_DELIMITER.join(data_types)}]'
376
+ if self.use_union_operator:
377
+ type_ = UNION_OPERATOR_DELIMITER.join(data_types)
378
+ else:
379
+ type_ = f'{UNION_PREFIX}{UNION_DELIMITER.join(data_types)}]'
371
380
  elif len(self.data_types) == 1:
372
381
  type_ = self.data_types[0].type_hint
373
382
  elif self.literals:
@@ -1 +1 @@
1
- version: str = '0.18.1'
1
+ version: str = '0.20.0'
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: datamodel-code-generator
3
- Version: 0.18.1
3
+ Version: 0.20.0
4
4
  Summary: Datamodel Code Generator
5
5
  Home-page: https://github.com/koxudaxi/datamodel-code-generator
6
6
  License: MIT
@@ -16,12 +16,6 @@ Classifier: Programming Language :: Python :: 3.8
16
16
  Classifier: Programming Language :: Python :: 3.9
17
17
  Classifier: Programming Language :: Python :: 3.10
18
18
  Classifier: Programming Language :: Python :: 3.11
19
- Classifier: Programming Language :: Python :: 3
20
- Classifier: Programming Language :: Python :: 3.10
21
- Classifier: Programming Language :: Python :: 3.11
22
- Classifier: Programming Language :: Python :: 3.7
23
- Classifier: Programming Language :: Python :: 3.8
24
- Classifier: Programming Language :: Python :: 3.9
25
19
  Classifier: Programming Language :: Python :: Implementation :: CPython
26
20
  Provides-Extra: http
27
21
  Requires-Dist: PySnooper (>=0.4.1,<2.0.0)
@@ -32,7 +26,7 @@ Requires-Dist: httpx ; extra == "http"
32
26
  Requires-Dist: inflect (>=4.1.0,<6.0)
33
27
  Requires-Dist: isort (>=4.3.21,<6.0)
34
28
  Requires-Dist: jinja2 (>=2.10.1,<4.0)
35
- Requires-Dist: openapi-spec-validator (>=0.2.8,<=0.5.1)
29
+ Requires-Dist: openapi-spec-validator (>=0.2.8,<=0.5.2)
36
30
  Requires-Dist: packaging
37
31
  Requires-Dist: prance (>=0.18.2,<1.0)
38
32
  Requires-Dist: pydantic[email] (>=1.10.0,<2.0.0) ; python_version >= "3.11" and python_version < "4.0"
@@ -44,10 +38,11 @@ Description-Content-Type: text/markdown
44
38
 
45
39
  # datamodel-code-generator
46
40
 
47
- This code generator creates [pydantic](https://docs.pydantic.dev/) model and [dataclasses.dataclass](https://docs.python.org/3/library/dataclasses.html) from an openapi file and others.
41
+ This code generator creates [pydantic](https://docs.pydantic.dev/) model, [dataclasses.dataclass](https://docs.python.org/3/library/dataclasses.html) and [typing.TypedDict](https://docs.python.org/3/library/typing.html#typing.TypedDict) from an openapi file and others.
48
42
 
49
43
  [![Build Status](https://github.com/koxudaxi/datamodel-code-generator/workflows/Test/badge.svg)](https://github.com/koxudaxi/datamodel-code-generator/actions?query=workflow%3ATest)
50
44
  [![PyPI version](https://badge.fury.io/py/datamodel-code-generator.svg)](https://pypi.python.org/pypi/datamodel-code-generator)
45
+ [![Conda-forge](https://img.shields.io/conda/v/conda-forge/datamodel-code-generator)](https://anaconda.org/conda-forge/datamodel-code-generator)
51
46
  [![Downloads](https://pepy.tech/badge/datamodel-code-generator/month)](https://pepy.tech/project/datamodel-code-generator)
52
47
  [![PyPI - Python Version](https://img.shields.io/pypi/pyversions/datamodel-code-generator)](https://pypi.python.org/pypi/datamodel-code-generator)
53
48
  [![codecov](https://codecov.io/gh/koxudaxi/datamodel-code-generator/branch/master/graph/badge.svg)](https://codecov.io/gh/koxudaxi/datamodel-code-generator)
@@ -68,7 +63,7 @@ To install `datamodel-code-generator`:
68
63
  $ pip install datamodel-code-generator
69
64
  ```
70
65
 
71
- ## Simple usage
66
+ ## Simple Usage
72
67
  You can generate models from a local file.
73
68
  ```bash
74
69
  $ datamodel-codegen --input api.yaml --output model.py
@@ -278,14 +273,14 @@ class Apis(BaseModel):
278
273
  ```
279
274
  </details>
280
275
 
281
- ## Which project uses it?
282
- These OSS use datamodel-code-generator to generate many models. We can learn about use-cases from these projects.
276
+ ## Projects that use datamodel-code-generator
277
+ These OSS projects use datamodel-code-generator to generate many models. See the following linked projects for real world examples and inspiration.
283
278
  - [Netflix/consoleme](https://github.com/Netflix/consoleme)
284
279
  - *[How do I generate models from the Swagger specification?](https://github.com/Netflix/consoleme/blob/master/docs/gitbook/faq.md#how-do-i-generate-models-from-the-swagger-specification)*
285
280
  - [DataDog/integrations-core](https://github.com/DataDog/integrations-core)
286
281
  - *[Config models](https://github.com/DataDog/integrations-core/blob/master/docs/developer/meta/config-models.md)*
287
282
  - [awslabs/aws-lambda-powertools-python](https://github.com/awslabs/aws-lambda-powertools-python)
288
- - *Not used. But, introduced [advanced-use-cases](https://awslabs.github.io/aws-lambda-powertools-python/2.6.0/utilities/parser/#advanced-use-cases) in the official document*
283
+ - *Recommended for [advanced-use-cases](https://awslabs.github.io/aws-lambda-powertools-python/2.6.0/utilities/parser/#advanced-use-cases) in the official documentation*
289
284
  - [open-metadata/OpenMetadata](https://github.com/open-metadata/OpenMetadata)
290
285
  - [Makefile](https://github.com/open-metadata/OpenMetadata/blob/main/Makefile)
291
286
  - [airbytehq/airbyte](https://github.com/airbytehq/airbyte)
@@ -303,6 +298,7 @@ These OSS use datamodel-code-generator to generate many models. We can learn abo
303
298
  ## Supported output types
304
299
  - [pydantic](https://docs.pydantic.dev/).BaseModel
305
300
  - [dataclasses.dataclass](https://docs.python.org/3/library/dataclasses.html)
301
+ - [typing.TypedDict](https://docs.python.org/3/library/typing.html#typing.TypedDict)
306
302
 
307
303
  ## Installation
308
304
 
@@ -328,7 +324,7 @@ You can generate models from a URL.
328
324
  ```bash
329
325
  $ datamodel-codegen --url https://<INPUT FILE URL> --output model.py
330
326
  ```
331
- This method needs [http extra option](#http-extra-option)
327
+ This method needs the [http extra option](#http-extra-option)
332
328
 
333
329
 
334
330
  ## All Command Options
@@ -339,7 +335,7 @@ usage: datamodel-codegen [-h] [--input INPUT] [--url URL]
339
335
  [--http-headers HTTP_HEADER [HTTP_HEADER ...]]
340
336
  [--http-ignore-tls]
341
337
  [--input-file-type {auto,openapi,jsonschema,json,yaml,dict,csv}]
342
- [--output-model-type {pydantic.BaseModel,dataclasses.dataclass}]
338
+ [--output-model-type {pydantic.BaseModel,dataclasses.dataclass,typing.TypedDict}]
343
339
  [--openapi-scopes {schemas,paths,tags,parameters} [{schemas,paths,tags,parameters} ...]]
344
340
  [--output OUTPUT] [--base-class BASE_CLASS]
345
341
  [--field-constraints] [--use-annotated]
@@ -390,7 +386,7 @@ options:
390
386
  certificate
391
387
  --input-file-type {auto,openapi,jsonschema,json,yaml,dict,csv}
392
388
  Input file type (default: auto)
393
- --output-model-type {pydantic.BaseModel,dataclasses.dataclass}
389
+ --output-model-type {pydantic.BaseModel,dataclasses.dataclass,typing.TypedDict}
394
390
  Output model type (default: pydantic.BaseModel)
395
391
  --openapi-scopes {schemas,paths,tags,parameters} [{schemas,paths,tags,parameters} ...]
396
392
  Scopes of OpenAPI model generation (default: schemas)
@@ -496,41 +492,6 @@ options:
496
492
  --version show version
497
493
  ```
498
494
 
499
-
500
- ## Implemented list
501
- ### OpenAPI 3 and JsonSchema
502
- #### DataType
503
- - string (include patter/minLength/maxLenght)
504
- - number (include maximum/exclusiveMaximum/minimum/exclusiveMinimum/multipleOf/le/ge)
505
- - integer (include maximum/exclusiveMaximum/minimum/exclusiveMinimum/multipleOf/le/ge)
506
- - boolean
507
- - array
508
- - object
509
-
510
- ##### String Format
511
- - date
512
- - datetime
513
- - time
514
- - password
515
- - email
516
- - idn-email
517
- - uuid (uuid1/uuid2/uuid3/uuid4/uuid5)
518
- - ipv4
519
- - ipv6
520
- - ipv4-network
521
- - ipv6-network
522
- - hostname
523
- - decimal
524
-
525
- #### Other schema
526
- - enum (as enum.Enum or typing.Literal)
527
- - allOf (as Multiple inheritance)
528
- - anyOf (as typing.Union)
529
- - oneOf (as typing.Union)
530
- - $ref ([http extra](#http-extra-option) is required when resolving $ref for remote files.)
531
- - $id (for [JSONSchema](https://json-schema.org/understanding-json-schema/structuring.html#the-id-property))
532
-
533
-
534
495
  ## Related projects
535
496
  ### fastapi-code-generator
536
497
  This code generator creates [FastAPI](https://github.com/tiangolo/fastapi) app from an openapi file.
@@ -0,0 +1,42 @@
1
+ datamodel_code_generator/__init__.py,sha256=ny6ibAH0NzzZOmEEWea591gFiF5ZwRRNfnnHwN_mCMk,17703
2
+ datamodel_code_generator/__main__.py,sha256=dCis7YXh4wwnkz9txKZfyokQy9GYbH7NzC6A9Xjpkt0,25270
3
+ datamodel_code_generator/format.py,sha256=3P8WeWWubSretQbrMj3BKjt0z3081hEv0D0EBrl4Zqs,5698
4
+ datamodel_code_generator/http.py,sha256=Mf4_fTV-2Ni_4Slz-ey3Dtggw1eFAHVyJHnyMgXlmQM,586
5
+ datamodel_code_generator/imports.py,sha256=9qVskiTei-38Z1zQvu9sW0wBa95x9iPhHt686pDlTvI,3230
6
+ datamodel_code_generator/model/__init__.py,sha256=8mv8jt1MWSRZEZuWIBrk8F7UwGyvKvrQ2P3s58TzrIo,2260
7
+ datamodel_code_generator/model/base.py,sha256=O2aC8HXWlXKMeyHUzo1nOR1IP8wlV-4HUNN3hbT8Dzk,12081
8
+ datamodel_code_generator/model/dataclass.py,sha256=gUsjMhnQMVowBStY8ycbykaaQBEENoHl2EL7vcr3AJ8,3842
9
+ datamodel_code_generator/model/enum.py,sha256=Giehhtij2DZs2LssSJnG_CIIHsSA7Mkz471GU-Cb5kI,3338
10
+ datamodel_code_generator/model/improts.py,sha256=uQJgWKmH-afM3UJtRymJndxZfM6NvDOpde3BvvxPmt8,473
11
+ datamodel_code_generator/model/pydantic/__init__.py,sha256=LtgJEdAkeUAa3AyElEQL6ZbVI7vq7c0P1FBU4dIblN8,1335
12
+ datamodel_code_generator/model/pydantic/base_model.py,sha256=DTo7nxNPv3eVLQXWh06Rds2oMjJ9_FRfYWBL-6N8Sds,10341
13
+ datamodel_code_generator/model/pydantic/custom_root_type.py,sha256=XOeJqzUEAYE21C3hPAnRIz9iDWIjZvUOWDc9MCrpdvw,299
14
+ datamodel_code_generator/model/pydantic/dataclass.py,sha256=sbqTmutl8Fjf1pYngfdv0NMXt904QcTRpHqmZy6GUiQ,424
15
+ datamodel_code_generator/model/pydantic/imports.py,sha256=2nSLYwphBUMQEa0PTSNwoLjEBslu02EQb6BdZ-S51yk,2189
16
+ datamodel_code_generator/model/pydantic/types.py,sha256=PZINYgsBDAS3RZwYi_4iu9-kL3G3UwKDn2TSzo8mS5I,11765
17
+ datamodel_code_generator/model/rootmodel.py,sha256=8bW7emVQtDGe2iUAmqtlQb607LvTRL0TBSP66pIeNzY,202
18
+ datamodel_code_generator/model/template/Enum.jinja2,sha256=k9lB8iQUsB94bPi8e3xJEd0AGk2ciWL-pSZuGY5kNPQ,378
19
+ datamodel_code_generator/model/template/TypedDict.jinja2,sha256=J_Pe_CiuvTOb-EUCExXPaeTEFzn2keyrKB0wglZ8HgA,135
20
+ datamodel_code_generator/model/template/TypedDictClass.jinja2,sha256=URwp5__WyR8G21Hoyc17aMzoast-NppXnXe19VFi5wQ,377
21
+ datamodel_code_generator/model/template/TypedDictFunction.jinja2,sha256=KjSij5_w4ow4a12SR3orYOndmXGkIvJBBUN735bQ6G0,321
22
+ datamodel_code_generator/model/template/dataclass.jinja2,sha256=U-zwO9xoClVcNmNCDBnvZQo0_xcRWsPYh2QfsNMyUsQ,816
23
+ datamodel_code_generator/model/template/pydantic/BaseModel.jinja2,sha256=sYZa-47YAXqZrd5cYKVnPrsbDvLkHEJOUd7M0nAosP8,1084
24
+ datamodel_code_generator/model/template/pydantic/BaseModel_root.jinja2,sha256=WDdTXYNTrkIw-B4OvPVxOaETTknLs0zdNuq_iDQ2Bcw,1000
25
+ datamodel_code_generator/model/template/pydantic/Config.jinja2,sha256=Ik028qdqQhDfEP207TCbwVv2b5Do1-nRNDPKzBHKzwM,135
26
+ datamodel_code_generator/model/template/pydantic/dataclass.jinja2,sha256=hM4OZTVhtOokqlPNSdh5drhBXfQLPvbyO88jipSPr5Y,629
27
+ datamodel_code_generator/model/template/root.jinja2,sha256=3OTtibxLcGA-FMdR0QDCJUJQgf_kRW0OafeCTPFSFFo,162
28
+ datamodel_code_generator/model/typed_dict.py,sha256=yCih5a7UWM0Mm9pogdpfIxBL7oclZIMISUS7B2e5R0Q,4655
29
+ datamodel_code_generator/model/types.py,sha256=GmKGPrpbJRgcsfRv9Khpo26e8DgMrPBnmLw6cpdjdiE,2891
30
+ datamodel_code_generator/parser/__init__.py,sha256=zHbw6RPlJC0SAQjb-XyVlyZhcOu5PfYgPidy6jlUM8M,793
31
+ datamodel_code_generator/parser/base.py,sha256=MqLWSynkovpt_kDfkvfxtqLB-bjgaMTdTt-i4FzdSEM,43374
32
+ datamodel_code_generator/parser/jsonschema.py,sha256=EhEnAUfCOaRrPqhm1t2MvecwLqn5EMVbysVg5iBWpSM,63897
33
+ datamodel_code_generator/parser/openapi.py,sha256=EX55EW1Zv6hPfWOD4J9vdcnObm5bFrilB5GJwcpNooM,23306
34
+ datamodel_code_generator/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
35
+ datamodel_code_generator/reference.py,sha256=q0QGhSckXtIvJG6SQlHEC3iGxy_vUDZY7EMlSZW5-Z8,24326
36
+ datamodel_code_generator/types.py,sha256=NJfGgNXOIC7LzFCr40r-AJnOJosBusAH7PS6RiJm8W4,16606
37
+ datamodel_code_generator/version.py,sha256=ywZOtUnDPsvM30GK-r8kjO_kBnrpneA4wWZ_zZKvII0,24
38
+ datamodel_code_generator-0.20.0.dist-info/LICENSE,sha256=K54Lwc6_jduycsy8oFFjQEeSSuEiqvVIjCGIXOMnuTQ,1068
39
+ datamodel_code_generator-0.20.0.dist-info/METADATA,sha256=ZdGd5YpgmIVmsImsJcoRsMKW5NNl6-MEBAzzfdOSJPg,22038
40
+ datamodel_code_generator-0.20.0.dist-info/WHEEL,sha256=Zb28QaM1gQi8f4VCBhsUklF61CTlNYfs9YAZn-TOGFk,88
41
+ datamodel_code_generator-0.20.0.dist-info/entry_points.txt,sha256=bykbUWqOCiKfxJPGe8jpNqTqD1NG7uyRmozdnwzu7rk,76
42
+ datamodel_code_generator-0.20.0.dist-info/RECORD,,
@@ -1,4 +1,4 @@
1
1
  Wheel-Version: 1.0
2
- Generator: poetry-core 1.5.2
2
+ Generator: poetry-core 1.6.1
3
3
  Root-Is-Purelib: true
4
4
  Tag: py3-none-any
@@ -1,38 +0,0 @@
1
- datamodel_code_generator/__init__.py,sha256=9jyLRpb_P4FwQfT4Mwx5o-Hgn1g7mHRkprYKA0xG2UY,17394
2
- datamodel_code_generator/__main__.py,sha256=rEM6wEBEfzfxz-DFHM3YLi5qpsdfcxKFIRG3cv7tqqc,24552
3
- datamodel_code_generator/format.py,sha256=RG-6GsU6d90hI9gKXapsVbTVn1INQvZvuPAc2Xiekao,4688
4
- datamodel_code_generator/http.py,sha256=Mf4_fTV-2Ni_4Slz-ey3Dtggw1eFAHVyJHnyMgXlmQM,586
5
- datamodel_code_generator/imports.py,sha256=h76smoWEmKQQQ7TenLzDwvAolkR8_VXo0VvlQWbhJUs,3149
6
- datamodel_code_generator/model/__init__.py,sha256=DX9cdzYTYMYnZJpRm2aSmJMP48ZHBCtJijStWM7ynoA,1642
7
- datamodel_code_generator/model/base.py,sha256=0eWdmNpIehxhY8fTe2gIDi5ULzkFL0iz8g4Ctg_Icho,11855
8
- datamodel_code_generator/model/dataclass.py,sha256=mxZZWqZqUVFSYRoL4uNDJ4HZz20D9vB_LyR_7FTfCIE,3632
9
- datamodel_code_generator/model/enum.py,sha256=Giehhtij2DZs2LssSJnG_CIIHsSA7Mkz471GU-Cb5kI,3338
10
- datamodel_code_generator/model/improts.py,sha256=osEpBubK1ZjS4oqvlLuTRxMsKuWtO4FU4TkMoeSVJ2c,177
11
- datamodel_code_generator/model/pydantic/__init__.py,sha256=LtgJEdAkeUAa3AyElEQL6ZbVI7vq7c0P1FBU4dIblN8,1335
12
- datamodel_code_generator/model/pydantic/base_model.py,sha256=DTo7nxNPv3eVLQXWh06Rds2oMjJ9_FRfYWBL-6N8Sds,10341
13
- datamodel_code_generator/model/pydantic/custom_root_type.py,sha256=XOeJqzUEAYE21C3hPAnRIz9iDWIjZvUOWDc9MCrpdvw,299
14
- datamodel_code_generator/model/pydantic/dataclass.py,sha256=sbqTmutl8Fjf1pYngfdv0NMXt904QcTRpHqmZy6GUiQ,424
15
- datamodel_code_generator/model/pydantic/imports.py,sha256=2nSLYwphBUMQEa0PTSNwoLjEBslu02EQb6BdZ-S51yk,2189
16
- datamodel_code_generator/model/pydantic/types.py,sha256=rHKqIZFu0lzjGiQJdO27kyIXuGzH1PevPXwlTKMPSwU,11794
17
- datamodel_code_generator/model/rootmodel.py,sha256=8bW7emVQtDGe2iUAmqtlQb607LvTRL0TBSP66pIeNzY,202
18
- datamodel_code_generator/model/template/Enum.jinja2,sha256=k9lB8iQUsB94bPi8e3xJEd0AGk2ciWL-pSZuGY5kNPQ,378
19
- datamodel_code_generator/model/template/dataclass.jinja2,sha256=U-zwO9xoClVcNmNCDBnvZQo0_xcRWsPYh2QfsNMyUsQ,816
20
- datamodel_code_generator/model/template/pydantic/BaseModel.jinja2,sha256=sYZa-47YAXqZrd5cYKVnPrsbDvLkHEJOUd7M0nAosP8,1084
21
- datamodel_code_generator/model/template/pydantic/BaseModel_root.jinja2,sha256=WDdTXYNTrkIw-B4OvPVxOaETTknLs0zdNuq_iDQ2Bcw,1000
22
- datamodel_code_generator/model/template/pydantic/Config.jinja2,sha256=Ik028qdqQhDfEP207TCbwVv2b5Do1-nRNDPKzBHKzwM,135
23
- datamodel_code_generator/model/template/pydantic/dataclass.jinja2,sha256=hM4OZTVhtOokqlPNSdh5drhBXfQLPvbyO88jipSPr5Y,629
24
- datamodel_code_generator/model/template/root.jinja2,sha256=3OTtibxLcGA-FMdR0QDCJUJQgf_kRW0OafeCTPFSFFo,162
25
- datamodel_code_generator/model/types.py,sha256=YwJfPcvrBPsSF16Bft-2j8lPajGpx9pUaQC2y_aRWAo,2920
26
- datamodel_code_generator/parser/__init__.py,sha256=zHbw6RPlJC0SAQjb-XyVlyZhcOu5PfYgPidy6jlUM8M,793
27
- datamodel_code_generator/parser/base.py,sha256=KVkpFCWUI9DstQQgAHqX6AIGC6FmkOTCDo800lg-y-M,43515
28
- datamodel_code_generator/parser/jsonschema.py,sha256=gKwqjej3Exy_umA8wjUpqieUwd-uTCB5xkZ1_zsPRzY,61423
29
- datamodel_code_generator/parser/openapi.py,sha256=Ntx_9V_fIGcdwjlaC2l3PAn8prwSBOQVdJbIfOXobh4,23925
30
- datamodel_code_generator/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
31
- datamodel_code_generator/reference.py,sha256=q0QGhSckXtIvJG6SQlHEC3iGxy_vUDZY7EMlSZW5-Z8,24326
32
- datamodel_code_generator/types.py,sha256=ZhLw5kg7DtwjlaFicpSFx7E1BWCsq90HnJ-9mlWV06k,16241
33
- datamodel_code_generator/version.py,sha256=QepimqfEl3aW511yrvGuH1zmF7a8cIxFX_5WL6-nReM,24
34
- datamodel_code_generator-0.18.1.dist-info/LICENSE,sha256=K54Lwc6_jduycsy8oFFjQEeSSuEiqvVIjCGIXOMnuTQ,1068
35
- datamodel_code_generator-0.18.1.dist-info/METADATA,sha256=WczeWspzpXhigVl6wtId-we_sqrHfF1IOuD3VMjpziI,22802
36
- datamodel_code_generator-0.18.1.dist-info/WHEEL,sha256=7Z8_27uaHI_UZAc4Uox4PpBhQ9Y5_modZXWMxtUi4NU,88
37
- datamodel_code_generator-0.18.1.dist-info/entry_points.txt,sha256=bykbUWqOCiKfxJPGe8jpNqTqD1NG7uyRmozdnwzu7rk,76
38
- datamodel_code_generator-0.18.1.dist-info/RECORD,,