datamodel-code-generator 0.27.1__py3-none-any.whl → 0.27.3__py3-none-any.whl
This diff shows the contents of publicly released package versions as they appear in their public registries and is provided for informational purposes only.
Potentially problematic release.
This version of datamodel-code-generator might be problematic.
- datamodel_code_generator/__init__.py +159 -190
- datamodel_code_generator/__main__.py +151 -173
- datamodel_code_generator/arguments.py +227 -230
- datamodel_code_generator/format.py +77 -99
- datamodel_code_generator/http.py +9 -10
- datamodel_code_generator/imports.py +57 -64
- datamodel_code_generator/model/__init__.py +26 -31
- datamodel_code_generator/model/base.py +94 -127
- datamodel_code_generator/model/dataclass.py +58 -59
- datamodel_code_generator/model/enum.py +34 -30
- datamodel_code_generator/model/imports.py +13 -11
- datamodel_code_generator/model/msgspec.py +112 -126
- datamodel_code_generator/model/pydantic/__init__.py +14 -27
- datamodel_code_generator/model/pydantic/base_model.py +120 -139
- datamodel_code_generator/model/pydantic/custom_root_type.py +2 -2
- datamodel_code_generator/model/pydantic/dataclass.py +6 -4
- datamodel_code_generator/model/pydantic/imports.py +35 -33
- datamodel_code_generator/model/pydantic/types.py +86 -117
- datamodel_code_generator/model/pydantic_v2/__init__.py +17 -17
- datamodel_code_generator/model/pydantic_v2/base_model.py +118 -119
- datamodel_code_generator/model/pydantic_v2/imports.py +5 -3
- datamodel_code_generator/model/pydantic_v2/root_model.py +6 -6
- datamodel_code_generator/model/pydantic_v2/types.py +8 -7
- datamodel_code_generator/model/rootmodel.py +1 -1
- datamodel_code_generator/model/scalar.py +33 -32
- datamodel_code_generator/model/typed_dict.py +42 -41
- datamodel_code_generator/model/types.py +19 -17
- datamodel_code_generator/model/union.py +21 -17
- datamodel_code_generator/parser/__init__.py +12 -11
- datamodel_code_generator/parser/base.py +320 -492
- datamodel_code_generator/parser/graphql.py +80 -111
- datamodel_code_generator/parser/jsonschema.py +422 -580
- datamodel_code_generator/parser/openapi.py +175 -204
- datamodel_code_generator/pydantic_patch.py +8 -9
- datamodel_code_generator/reference.py +192 -274
- datamodel_code_generator/types.py +147 -182
- datamodel_code_generator/util.py +22 -26
- {datamodel_code_generator-0.27.1.dist-info → datamodel_code_generator-0.27.3.dist-info}/METADATA +12 -11
- datamodel_code_generator-0.27.3.dist-info/RECORD +59 -0
- datamodel_code_generator-0.27.1.dist-info/RECORD +0 -59
- {datamodel_code_generator-0.27.1.dist-info → datamodel_code_generator-0.27.3.dist-info}/WHEEL +0 -0
- {datamodel_code_generator-0.27.1.dist-info → datamodel_code_generator-0.27.3.dist-info}/entry_points.txt +0 -0
- {datamodel_code_generator-0.27.1.dist-info → datamodel_code_generator-0.27.3.dist-info}/licenses/LICENSE +0 -0
@@ -11,19 +11,13 @@ from typing import (
     TYPE_CHECKING,
     Any,
     Callable,
-    DefaultDict,
     Dict,
     Iterator,
-    List,
     Mapping,
-    Optional,
     Sequence,
-    Set,
     TextIO,
-    Tuple,
-    Type,
     TypeVar,
-    Union,
+    cast,
 )
 from urllib.parse import ParseResult

@@ -31,13 +25,10 @@ import yaml

 import datamodel_code_generator.pydantic_patch  # noqa: F401
 from datamodel_code_generator.format import DatetimeClassType, PythonVersion
-from datamodel_code_generator.model.pydantic_v2 import UnionMode
 from datamodel_code_generator.parser import DefaultPutDict, LiteralType
-from datamodel_code_generator.parser.base import Parser
-from datamodel_code_generator.types import StrictTypes
-from datamodel_code_generator.util import SafeLoader  # type: ignore
+from datamodel_code_generator.util import SafeLoader

-T = TypeVar('T')
+T = TypeVar("T")

 try:
     import pysnooper
@@ -46,11 +37,11 @@ try:
 except ImportError:  # pragma: no cover
     pysnooper = None

-DEFAULT_BASE_CLASS: str = 'pydantic.BaseModel'
+DEFAULT_BASE_CLASS: str = "pydantic.BaseModel"


-def load_yaml(stream: Union[str, TextIO]) -> Any:
-    return yaml.load(stream, Loader=SafeLoader)
+def load_yaml(stream: str | TextIO) -> Any:
+    return yaml.load(stream, Loader=SafeLoader)  # noqa: S506


 def load_yaml_from_path(path: Path, encoding: str) -> Any:
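Note: `load_yaml` is the small wrapper this hunk reformats; it still routes every YAML read through the package's SafeLoader. A minimal usage sketch (the helper is internal but importable; everything beyond the import path is illustrative):

    from datamodel_code_generator import load_yaml

    doc = load_yaml("openapi: 3.0.0\ninfo:\n  title: Example\n  version: 1.0.0\n")
    print(doc["openapi"])  # -> 3.0.0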
@@ -59,24 +50,28 @@ def load_yaml_from_path(path: Path, encoding: str) -> Any:


 if TYPE_CHECKING:
+    from collections import defaultdict
+
+    from datamodel_code_generator.model.pydantic_v2 import UnionMode
+    from datamodel_code_generator.parser.base import Parser
+    from datamodel_code_generator.types import StrictTypes

     def get_version() -> str: ...

 else:

     def get_version() -> str:
-        package = 'datamodel-code-generator'
+        package = "datamodel-code-generator"

-        from importlib.metadata import version
+        from importlib.metadata import version  # noqa: PLC0415

         return version(package)


 def enable_debug_message() -> None:  # pragma: no cover
     if not pysnooper:
-        raise Exception(
-            "Please run `$pip install 'datamodel-code-generator[debug]'` to use debug option"
-        )
+        msg = "Please run `$pip install 'datamodel-code-generator[debug]'` to use debug option"
+        raise Exception(msg)  # noqa: TRY002

     pysnooper.tracer.DISABLED = False

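Note: this hunk moves typing-only imports (`defaultdict`, `UnionMode`, `Parser`, `StrictTypes`) under `if TYPE_CHECKING:`, so they are no longer imported at runtime. A generic sketch of that pattern, with illustrative names not taken from this diff:

    from __future__ import annotations

    from typing import TYPE_CHECKING

    if TYPE_CHECKING:  # resolved by type checkers only; no runtime import
        from pathlib import Path


    def first_line(path: Path) -> str:  # annotation stays a string at runtime
        return path.read_text().splitlines()[0]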
@@ -84,37 +79,15 @@ def enable_debug_message() -> None:  # pragma: no cover
 DEFAULT_MAX_VARIABLE_LENGTH: int = 100


-def snooper_to_methods(  # type: ignore
-    output=None,
-    watch=(),
-    watch_explode=(),
-    depth=1,
-    prefix='',
-    overwrite=False,
-    thread_info=False,
-    custom_repr=(),
-    max_variable_length: Optional[int] = DEFAULT_MAX_VARIABLE_LENGTH,
-) -> Callable[..., Any]:
-    def inner(cls: Type[T]) -> Type[T]:
+def snooper_to_methods() -> Callable[..., Any]:
+    def inner(cls: type[T]) -> type[T]:
         if not pysnooper:
             return cls
-        import inspect
+        import inspect  # noqa: PLC0415

         methods = inspect.getmembers(cls, predicate=inspect.isfunction)
         for name, method in methods:
-            snooper_method = pysnooper.snoop(
-                output,
-                watch,
-                watch_explode,
-                depth,
-                prefix,
-                overwrite,
-                thread_info,
-                custom_repr,
-                max_variable_length
-                if max_variable_length is not None
-                else DEFAULT_MAX_VARIABLE_LENGTH,
-            )(method)
+            snooper_method = pysnooper.snoop(max_variable_length=DEFAULT_MAX_VARIABLE_LENGTH)(method)
             setattr(cls, name, snooper_method)
         return cls

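Note: `snooper_to_methods` now drops its unused parameters and calls `pysnooper.snoop` with only `max_variable_length`. A hedged sketch of the underlying pattern — decorating every method of a class with a pysnooper tracer (requires `pip install pysnooper`; the class and method names are illustrative):

    import inspect

    import pysnooper


    def trace_methods(cls):
        # wrap every function attribute with a pysnooper tracer
        for name, method in inspect.getmembers(cls, predicate=inspect.isfunction):
            setattr(cls, name, pysnooper.snoop(max_variable_length=100)(method))
        return cls


    @trace_methods
    class Greeter:
        def greet(self, name: str) -> str:
            return f"hello {name}"


    Greeter().greet("world")  # the call is traced to stderr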
@@ -122,7 +95,7 @@ def snooper_to_methods(  # type: ignore


 @contextlib.contextmanager
-def chdir(path: Optional[Path]) -> Iterator[None]:
+def chdir(path: Path | None) -> Iterator[None]:
     """Changes working directory and returns to previous on exit."""

     if path is None:
@@ -137,12 +110,12 @@ def chdir(path: Optional[Path]) -> Iterator[None]:


 def is_openapi(text: str) -> bool:
-    return 'openapi' in load_yaml(text)
+    return "openapi" in load_yaml(text)


-JSON_SCHEMA_URLS: Tuple[str, ...] = (
-    'http://json-schema.org/',
-    'https://json-schema.org/',
+JSON_SCHEMA_URLS: tuple[str, ...] = (
+    "http://json-schema.org/",
+    "https://json-schema.org/",
 )

@@ -150,39 +123,35 @@ def is_schema(text: str) -> bool:
     data = load_yaml(text)
     if not isinstance(data, dict):
         return False
-    schema = data.get('$schema')
-    if isinstance(schema, str) and any(
-        schema.startswith(u) for u in JSON_SCHEMA_URLS
-    ):  # pragma: no cover
+    schema = data.get("$schema")
+    if isinstance(schema, str) and any(schema.startswith(u) for u in JSON_SCHEMA_URLS):  # pragma: no cover
         return True
-    if isinstance(data.get('type'), str):
+    if isinstance(data.get("type"), str):
         return True
     if any(
         isinstance(data.get(o), list)
         for o in (
-            'allOf',
-            'anyOf',
-            'oneOf',
+            "allOf",
+            "anyOf",
+            "oneOf",
         )
     ):
         return True
-    if isinstance(data.get('properties'), dict):
-        return True
-    return False
+    return bool(isinstance(data.get("properties"), dict))


 class InputFileType(Enum):
-    Auto = 'auto'
-    OpenAPI = 'openapi'
-    JsonSchema = 'jsonschema'
-    Json = 'json'
-    Yaml = 'yaml'
-    Dict = 'dict'
-    CSV = 'csv'
-    GraphQL = 'graphql'
+    Auto = "auto"
+    OpenAPI = "openapi"
+    JsonSchema = "jsonschema"
+    Json = "json"
+    Yaml = "yaml"
+    Dict = "dict"
+    CSV = "csv"
+    GraphQL = "graphql"


-RAW_DATA_TYPES: List[InputFileType] = [
+RAW_DATA_TYPES: list[InputFileType] = [
     InputFileType.Json,
     InputFileType.Yaml,
     InputFileType.Dict,
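Note: the detection helpers above classify input text: `is_openapi` looks for a top-level `openapi` key, while `is_schema` checks `$schema` URLs, a string `type`, `allOf`/`anyOf`/`oneOf` lists, or a `properties` mapping. A hedged sketch (these are internal helpers; the sample documents are illustrative):

    from datamodel_code_generator import is_openapi, is_schema

    assert is_openapi("openapi: 3.0.0\npaths: {}\n")
    assert is_schema('{"$schema": "https://json-schema.org/draft/2020-12/schema"}')
    assert is_schema('{"type": "object", "properties": {"name": {"type": "string"}}}')
    assert not is_schema('{"name": "just data"}')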
@@ -192,22 +161,22 @@ RAW_DATA_TYPES: List[InputFileType] = [


 class DataModelType(Enum):
-    PydanticBaseModel = 'pydantic.BaseModel'
-    PydanticV2BaseModel = 'pydantic_v2.BaseModel'
-    DataclassesDataclass = 'dataclasses.dataclass'
-    TypingTypedDict = 'typing.TypedDict'
-    MsgspecStruct = 'msgspec.Struct'
+    PydanticBaseModel = "pydantic.BaseModel"
+    PydanticV2BaseModel = "pydantic_v2.BaseModel"
+    DataclassesDataclass = "dataclasses.dataclass"
+    TypingTypedDict = "typing.TypedDict"
+    MsgspecStruct = "msgspec.Struct"


 class OpenAPIScope(Enum):
-    Schemas = 'schemas'
-    Paths = 'paths'
-    Tags = 'tags'
-    Parameters = 'parameters'
+    Schemas = "schemas"
+    Paths = "paths"
+    Tags = "tags"
+    Parameters = "parameters"


 class GraphQLScope(Enum):
-    Schema = 'schema'
+    Schema = "schema"


 class Error(Exception):
@@ -221,51 +190,52 @@ class Error(Exception):
 class InvalidClassNameError(Error):
     def __init__(self, class_name: str) -> None:
         self.class_name = class_name
-        message = f'title={class_name!r} is invalid class name.'
+        message = f"title={class_name!r} is invalid class name."
         super().__init__(message=message)


 def get_first_file(path: Path) -> Path:  # pragma: no cover
     if path.is_file():
         return path
-    elif path.is_dir():
-        for child in path.rglob('*'):
+    if path.is_dir():
+        for child in path.rglob("*"):
             if child.is_file():
                 return child
-    raise Error('File not found')
+    msg = "File not found"
+    raise Error(msg)


-def generate(
-    input_: Union[Path, str, ParseResult, Mapping[str, Any]],
+def generate(  # noqa: PLR0912, PLR0913, PLR0914, PLR0915
+    input_: Path | str | ParseResult | Mapping[str, Any],
     *,
-    input_filename: Optional[str] = None,
+    input_filename: str | None = None,
     input_file_type: InputFileType = InputFileType.Auto,
-    output: Optional[Path] = None,
+    output: Path | None = None,
     output_model_type: DataModelType = DataModelType.PydanticBaseModel,
     target_python_version: PythonVersion = PythonVersion.PY_38,
-    base_class: str = '',
-    additional_imports: Optional[List[str]] = None,
-    custom_template_dir: Optional[Path] = None,
-    extra_template_data: Optional[DefaultDict[str, Dict[str, Any]]] = None,
+    base_class: str = "",
+    additional_imports: list[str] | None = None,
+    custom_template_dir: Path | None = None,
+    extra_template_data: defaultdict[str, dict[str, Any]] | None = None,
     validation: bool = False,
     field_constraints: bool = False,
     snake_case_field: bool = False,
     strip_default_none: bool = False,
-    aliases: Optional[Mapping[str, str]] = None,
+    aliases: Mapping[str, str] | None = None,
     disable_timestamp: bool = False,
     enable_version_header: bool = False,
     allow_population_by_field_name: bool = False,
     allow_extra_fields: bool = False,
     apply_default_values_for_required_fields: bool = False,
     force_optional_for_required_fields: bool = False,
-    class_name: Optional[str] = None,
+    class_name: str | None = None,
     use_standard_collections: bool = False,
     use_schema_description: bool = False,
     use_field_description: bool = False,
     use_default_kwarg: bool = False,
     reuse_model: bool = False,
-    encoding: str = 'utf-8',
-    enum_field_as_literal: Optional[LiteralType] = None,
+    encoding: str = "utf-8",
+    enum_field_as_literal: LiteralType | None = None,
     use_one_literal_as_default: bool = False,
     set_default_enum_member: bool = False,
     use_subclass_enum: bool = False,
@@ -273,54 +243,52 @@ def generate(
     use_generic_container_types: bool = False,
     enable_faux_immutability: bool = False,
     disable_appending_item_suffix: bool = False,
-    strict_types: Optional[Sequence[StrictTypes]] = None,
-    empty_enum_field_name: Optional[str] = None,
-    custom_class_name_generator: Optional[Callable[[str], str]] = None,
-    field_extra_keys: Optional[Set[str]] = None,
+    strict_types: Sequence[StrictTypes] | None = None,
+    empty_enum_field_name: str | None = None,
+    custom_class_name_generator: Callable[[str], str] | None = None,
+    field_extra_keys: set[str] | None = None,
     field_include_all_keys: bool = False,
-    field_extra_keys_without_x_prefix: Optional[Set[str]] = None,
-    openapi_scopes: Optional[List[OpenAPIScope]] = None,
-    graphql_scopes: Optional[List[GraphQLScope]] = None,
-    wrap_string_literal: Optional[bool] = None,
+    field_extra_keys_without_x_prefix: set[str] | None = None,
+    openapi_scopes: list[OpenAPIScope] | None = None,
+    graphql_scopes: list[GraphQLScope] | None = None,  # noqa: ARG001
+    wrap_string_literal: bool | None = None,
     use_title_as_name: bool = False,
     use_operation_id_as_name: bool = False,
     use_unique_items_as_set: bool = False,
-    http_headers: Optional[Sequence[Tuple[str, str]]] = None,
+    http_headers: Sequence[tuple[str, str]] | None = None,
     http_ignore_tls: bool = False,
     use_annotated: bool = False,
     use_non_positive_negative_number_constrained_types: bool = False,
-    original_field_name_delimiter: Optional[str] = None,
+    original_field_name_delimiter: str | None = None,
     use_double_quotes: bool = False,
     use_union_operator: bool = False,
     collapse_root_models: bool = False,
-    special_field_name_prefix: Optional[str] = None,
+    special_field_name_prefix: str | None = None,
     remove_special_field_name_prefix: bool = False,
     capitalise_enum_members: bool = False,
     keep_model_order: bool = False,
-    custom_file_header: Optional[str] = None,
-    custom_file_header_path: Optional[Path] = None,
-    custom_formatters: Optional[List[str]] = None,
-    custom_formatters_kwargs: Optional[Dict[str, Any]] = None,
+    custom_file_header: str | None = None,
+    custom_file_header_path: Path | None = None,
+    custom_formatters: list[str] | None = None,
+    custom_formatters_kwargs: dict[str, Any] | None = None,
     use_pendulum: bool = False,
-    http_query_parameters: Optional[Sequence[Tuple[str, str]]] = None,
+    http_query_parameters: Sequence[tuple[str, str]] | None = None,
     treat_dots_as_module: bool = False,
     use_exact_imports: bool = False,
-    union_mode: Optional[UnionMode] = None,
-    output_datetime_class: Optional[DatetimeClassType] = None,
+    union_mode: UnionMode | None = None,
+    output_datetime_class: DatetimeClassType | None = None,
     keyword_only: bool = False,
     no_alias: bool = False,
 ) -> None:
     remote_text_cache: DefaultPutDict[str, str] = DefaultPutDict()
     if isinstance(input_, str):
-        input_text: Optional[str] = input_
+        input_text: str | None = input_
     elif isinstance(input_, ParseResult):
-        from datamodel_code_generator.http import get_body
+        from datamodel_code_generator.http import get_body  # noqa: PLC0415

         input_text = remote_text_cache.get_or_put(
             input_.geturl(),
-            default_factory=lambda url: get_body(
-                url, http_headers, http_ignore_tls, http_query_parameters
-            ),
+            default_factory=lambda url: get_body(url, http_headers, http_ignore_tls, http_query_parameters),
         )
     else:
         input_text = None
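Note: the signature above is the library's main programmatic entry point. A hedged usage sketch matching the parameters shown in this diff (file names are illustrative):

    from pathlib import Path

    from datamodel_code_generator import DataModelType, InputFileType, generate

    generate(
        Path("person.json"),                       # Path, str, ParseResult, or Mapping
        input_file_type=InputFileType.JsonSchema,
        output=Path("person_model.py"),
        output_model_type=DataModelType.PydanticV2BaseModel,
    )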
@@ -330,84 +298,85 @@ def generate(
     if input_file_type == InputFileType.Auto:
         try:
             input_text_ = (
-                get_first_file(input_).read_text(encoding=encoding)
-                if isinstance(input_, Path)
-                else input_text
+                get_first_file(input_).read_text(encoding=encoding) if isinstance(input_, Path) else input_text
             )
             assert isinstance(input_text_, str)
             input_file_type = infer_input_type(input_text_)
-            print(
+            print(  # noqa: T201
                 inferred_message.format(input_file_type.value),
                 file=sys.stderr,
             )
-        except:
-            raise Error('Invalid file format')
+        except Exception as exc:
+            msg = "Invalid file format"
+            raise Error(msg) from exc

-    kwargs: Dict[str, Any] = {}
-    if input_file_type == InputFileType.OpenAPI:
-        from datamodel_code_generator.parser.openapi import OpenAPIParser
+    kwargs: dict[str, Any] = {}
+    if input_file_type == InputFileType.OpenAPI:  # noqa: PLR1702
+        from datamodel_code_generator.parser.openapi import OpenAPIParser  # noqa: PLC0415

-        parser_class: Type[Parser] = OpenAPIParser
-        kwargs['openapi_scopes'] = openapi_scopes
+        parser_class: type[Parser] = OpenAPIParser
+        kwargs["openapi_scopes"] = openapi_scopes
     elif input_file_type == InputFileType.GraphQL:
-        from datamodel_code_generator.parser.graphql import GraphQLParser
+        from datamodel_code_generator.parser.graphql import GraphQLParser  # noqa: PLC0415

-        parser_class: Type[Parser] = GraphQLParser
+        parser_class: type[Parser] = GraphQLParser
     else:
-        from datamodel_code_generator.parser.jsonschema import JsonSchemaParser
+        from datamodel_code_generator.parser.jsonschema import JsonSchemaParser  # noqa: PLC0415

         parser_class = JsonSchemaParser

     if input_file_type in RAW_DATA_TYPES:
-        import json
+        import json  # noqa: PLC0415

         try:
             if isinstance(input_, Path) and input_.is_dir():  # pragma: no cover
-                raise Error(f'Input must be a file for {input_file_type}')
-            obj: Dict[Any, Any]
+                msg = f"Input must be a file for {input_file_type}"
+                raise Error(msg)  # noqa: TRY301
+            obj: dict[Any, Any]
             if input_file_type == InputFileType.CSV:
-                import csv
+                import csv  # noqa: PLC0415

-                def get_header_and_first_line(csv_file: IO[str]) -> Dict[str, Any]:
+                def get_header_and_first_line(csv_file: IO[str]) -> dict[str, Any]:
                     csv_reader = csv.DictReader(csv_file)
-                    return dict(zip(csv_reader.fieldnames, next(csv_reader)))
+                    assert csv_reader.fieldnames is not None
+                    return dict(zip(csv_reader.fieldnames, next(csv_reader)))

                 if isinstance(input_, Path):
                     with input_.open(encoding=encoding) as f:
                         obj = get_header_and_first_line(f)
                 else:
-                    import io
+                    import io  # noqa: PLC0415

                     obj = get_header_and_first_line(io.StringIO(input_text))
             elif input_file_type == InputFileType.Yaml:
-                obj = load_yaml(
-                    input_.read_text(encoding=encoding)
-                    if isinstance(input_, Path)
-                    else input_text
-                )
+                if isinstance(input_, Path):
+                    obj = load_yaml(input_.read_text(encoding=encoding))
+                else:
+                    assert input_text is not None
+                    obj = load_yaml(input_text)
             elif input_file_type == InputFileType.Json:
-                obj = json.loads(
-                    input_.read_text(encoding=encoding)
-                    if isinstance(input_, Path)
-                    else input_text
-                )
+                if isinstance(input_, Path):
+                    obj = json.loads(input_.read_text(encoding=encoding))
+                else:
+                    assert input_text is not None
+                    obj = json.loads(input_text)
             elif input_file_type == InputFileType.Dict:
-                import ast
+                import ast  # noqa: PLC0415

                 # Input can be a dict object stored in a python file
                 obj = (
-                    ast.literal_eval(
-                        input_.read_text(encoding=encoding)  # type: ignore
-                    )
+                    ast.literal_eval(input_.read_text(encoding=encoding))
                     if isinstance(input_, Path)
-                    else input_
+                    else cast("Dict[Any, Any]", input_)
                 )
             else:  # pragma: no cover
-                raise Error(f'Unsupported input file type: {input_file_type}')
-        except:
-            raise Error('Invalid file format')
+                msg = f"Unsupported input file type: {input_file_type}"
+                raise Error(msg)  # noqa: TRY301
+        except Exception as exc:
+            msg = "Invalid file format"
+            raise Error(msg) from exc

-    from genson import SchemaBuilder
+    from genson import SchemaBuilder  # noqa: PLC0415

     builder = SchemaBuilder()
     builder.add_object(obj)
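Note: for raw inputs (JSON, YAML, CSV, dict) the parsed object is handed to genson's `SchemaBuilder`, which synthesizes a JSON Schema before model generation. A hedged sketch of that step in isolation (requires `pip install genson`; the sample object is illustrative):

    from genson import SchemaBuilder

    builder = SchemaBuilder()
    builder.add_object({"id": 1, "tags": ["a", "b"]})
    schema = builder.to_schema()
    # schema now describes an object with an integer "id" and a string-array "tags"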
@@ -418,17 +387,16 @@ def generate(

     if union_mode is not None:
         if output_model_type == DataModelType.PydanticV2BaseModel:
-            default_field_extras = {'union_mode': union_mode}
+            default_field_extras = {"union_mode": union_mode}
         else:  # pragma: no cover
-            raise Error('union_mode is only supported for pydantic_v2.BaseModel')
+            msg = "union_mode is only supported for pydantic_v2.BaseModel"
+            raise Error(msg)
     else:
         default_field_extras = None

-    from datamodel_code_generator.model import get_data_model_types
+    from datamodel_code_generator.model import get_data_model_types  # noqa: PLC0415

-    data_model_types = get_data_model_types(
-        output_model_type, target_python_version, output_datetime_class
-    )
+    data_model_types = get_data_model_types(output_model_type, target_python_version, output_datetime_class)
     source = input_text or input_
     assert not isinstance(source, Mapping)
     parser = parser_class(
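Note: as the check above enforces, `union_mode` is only accepted together with `DataModelType.PydanticV2BaseModel`. A hedged sketch that lists the available modes rather than assuming their member names:

    from datamodel_code_generator.model.pydantic_v2 import UnionMode

    print([mode.value for mode in UnionMode])  # inspect the supported modes
    # then pass one of them, e.g.:
    # generate(..., output_model_type=DataModelType.PydanticV2BaseModel,
    #          union_mode=UnionMode(<one of the values printed above>))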
@@ -454,9 +422,7 @@ def generate(
         force_optional_for_required_fields=force_optional_for_required_fields,
         class_name=class_name,
         use_standard_collections=use_standard_collections,
-        base_path=input_.parent
-        if isinstance(input_, Path) and input_.is_file()
-        else None,
+        base_path=input_.parent if isinstance(input_, Path) and input_.is_file() else None,
         use_schema_description=use_schema_description,
         use_field_description=use_field_description,
         use_default_kwarg=use_default_kwarg,
@@ -514,24 +480,27 @@ def generate(
     results = parser.parse()
     if not input_filename:  # pragma: no cover
         if isinstance(input_, str):
-            input_filename = '<stdin>'
+            input_filename = "<stdin>"
         elif isinstance(input_, ParseResult):
            input_filename = input_.geturl()
         elif input_file_type == InputFileType.Dict:
             # input_ might be a dict object provided directly, and missing a name field
-            input_filename = getattr(input_, 'name', '<dict>')
+            input_filename = getattr(input_, "name", "<dict>")
         else:
             assert isinstance(input_, Path)
             input_filename = input_.name
     if not results:
-        raise Error('Models not found in the input data')
-    elif isinstance(results, str):
+        msg = "Models not found in the input data"
+        raise Error(msg)
+    if isinstance(results, str):
         modules = {output: (results, input_filename)}
     else:
         if output is None:
-            raise Error('Modular references require an output directory')
+            msg = "Modular references require an output directory"
+            raise Error(msg)
         if output.suffix:
-            raise Error('Modular references require an output directory, not a file')
+            msg = "Modular references require an output directory, not a file"
+            raise Error(msg)
         modules = {
             output.joinpath(*name): (
                 result.body,
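Note: the checks above require a directory `output` whenever parsing produces multiple modules (for example an OpenAPI document that expands into several files). A hedged sketch (paths are illustrative):

    from pathlib import Path

    from datamodel_code_generator import InputFileType, generate

    generate(
        Path("api/openapi.yaml"),
        input_file_type=InputFileType.OpenAPI,
        output=Path("generated_models"),  # a directory; one .py file per module
    )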
@@ -549,22 +518,22 @@ def generate(
 # generated by datamodel-codegen:
 #   filename:  {}"""
     if not disable_timestamp:
-        header += f'\n#   timestamp: {timestamp}'
+        header += f"\n#   timestamp: {timestamp}"
     if enable_version_header:
-        header += f'\n#   version:   {get_version()}'
+        header += f"\n#   version:   {get_version()}"

-    file: Optional[IO[Any]]
+    file: IO[Any] | None
     for path, (body, filename) in modules.items():
         if path is None:
             file = None
         else:
             if not path.parent.exists():
                 path.parent.mkdir(parents=True)
-            file = path.open('wt', encoding=encoding)
+            file = path.open("wt", encoding=encoding)

         print(custom_file_header or header.format(filename), file=file)
         if body:
-            print('', file=file)
+            print(file=file)
             print(body.rstrip(), file=file)

         if file is not None:
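Note: every generated file starts with the header assembled above; the related `generate()` flags shown earlier in this diff control it. A hedged sketch (file names are illustrative):

    from pathlib import Path

    from datamodel_code_generator import generate

    generate(
        Path("person.json"),
        output=Path("person_model.py"),
        disable_timestamp=True,      # drop the "#   timestamp: ..." header line
        enable_version_header=True,  # add a "#   version: ..." header line
        # custom_file_header="# my header",  # printed instead of the generated header
    )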
@@ -574,22 +543,22 @@ def generate(
 def infer_input_type(text: str) -> InputFileType:
     if is_openapi(text):
         return InputFileType.OpenAPI
-    elif is_schema(text):
+    if is_schema(text):
         return InputFileType.JsonSchema
     return InputFileType.Json


 inferred_message = (
-    'The input file type was determined to be: {}\nThis can be specified explicitly with the '
-    '`--input-file-type` option.'
+    "The input file type was determined to be: {}\nThis can be specified explicitly with the "
+    "`--input-file-type` option."
 )

 __all__ = [
-    'DefaultPutDict',
-    'Error',
-    'InputFileType',
-    'InvalidClassNameError',
-    'LiteralType',
-    'PythonVersion',
-    'generate',
+    "DefaultPutDict",
+    "Error",
+    "InputFileType",
+    "InvalidClassNameError",
+    "LiteralType",
+    "PythonVersion",
+    "generate",
 ]
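Note: with the default `InputFileType.Auto`, `infer_input_type` routes text to OpenAPI, JSON Schema, or plain JSON using the helpers above. A hedged sketch (the function is internal but importable; sample inputs are illustrative):

    from datamodel_code_generator import InputFileType, infer_input_type

    assert infer_input_type("openapi: 3.0.0\n") is InputFileType.OpenAPI
    assert infer_input_type('{"type": "object"}') is InputFileType.JsonSchema
    assert infer_input_type('{"just": "data"}') is InputFileType.Json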