datamodel-code-generator 0.25.6__tar.gz → 0.25.8__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of datamodel-code-generator has been flagged as potentially problematic.
- {datamodel_code_generator-0.25.6 → datamodel_code_generator-0.25.8}/PKG-INFO +45 -20
- {datamodel_code_generator-0.25.6 → datamodel_code_generator-0.25.8}/README.md +44 -19
- {datamodel_code_generator-0.25.6 → datamodel_code_generator-0.25.8}/datamodel_code_generator/__init__.py +5 -0
- {datamodel_code_generator-0.25.6 → datamodel_code_generator-0.25.8}/datamodel_code_generator/__main__.py +6 -2
- {datamodel_code_generator-0.25.6 → datamodel_code_generator-0.25.8}/datamodel_code_generator/arguments.py +13 -0
- {datamodel_code_generator-0.25.6 → datamodel_code_generator-0.25.8}/datamodel_code_generator/imports.py +2 -1
- {datamodel_code_generator-0.25.6 → datamodel_code_generator-0.25.8}/datamodel_code_generator/model/base.py +2 -2
- {datamodel_code_generator-0.25.6 → datamodel_code_generator-0.25.8}/datamodel_code_generator/model/msgspec.py +6 -2
- {datamodel_code_generator-0.25.6 → datamodel_code_generator-0.25.8}/datamodel_code_generator/model/pydantic_v2/__init__.py +2 -0
- {datamodel_code_generator-0.25.6 → datamodel_code_generator-0.25.8}/datamodel_code_generator/model/pydantic_v2/base_model.py +15 -4
- {datamodel_code_generator-0.25.6 → datamodel_code_generator-0.25.8}/datamodel_code_generator/model/template/msgspec.jinja2 +4 -2
- {datamodel_code_generator-0.25.6 → datamodel_code_generator-0.25.8}/datamodel_code_generator/parser/base.py +115 -14
- {datamodel_code_generator-0.25.6 → datamodel_code_generator-0.25.8}/datamodel_code_generator/parser/graphql.py +34 -19
- {datamodel_code_generator-0.25.6 → datamodel_code_generator-0.25.8}/datamodel_code_generator/parser/jsonschema.py +14 -6
- {datamodel_code_generator-0.25.6 → datamodel_code_generator-0.25.8}/datamodel_code_generator/parser/openapi.py +4 -0
- datamodel_code_generator-0.25.8/datamodel_code_generator/pydantic_patch.py +21 -0
- {datamodel_code_generator-0.25.6 → datamodel_code_generator-0.25.8}/datamodel_code_generator/types.py +1 -6
- datamodel_code_generator-0.25.8/datamodel_code_generator/version.py +1 -0
- {datamodel_code_generator-0.25.6 → datamodel_code_generator-0.25.8}/pyproject.toml +2 -2
- datamodel_code_generator-0.25.6/datamodel_code_generator/version.py +0 -1
- {datamodel_code_generator-0.25.6 → datamodel_code_generator-0.25.8}/LICENSE +0 -0
- {datamodel_code_generator-0.25.6 → datamodel_code_generator-0.25.8}/datamodel_code_generator/format.py +0 -0
- {datamodel_code_generator-0.25.6 → datamodel_code_generator-0.25.8}/datamodel_code_generator/http.py +0 -0
- {datamodel_code_generator-0.25.6 → datamodel_code_generator-0.25.8}/datamodel_code_generator/model/__init__.py +0 -0
- {datamodel_code_generator-0.25.6 → datamodel_code_generator-0.25.8}/datamodel_code_generator/model/dataclass.py +0 -0
- {datamodel_code_generator-0.25.6 → datamodel_code_generator-0.25.8}/datamodel_code_generator/model/enum.py +0 -0
- {datamodel_code_generator-0.25.6 → datamodel_code_generator-0.25.8}/datamodel_code_generator/model/imports.py +0 -0
- {datamodel_code_generator-0.25.6 → datamodel_code_generator-0.25.8}/datamodel_code_generator/model/pydantic/__init__.py +0 -0
- {datamodel_code_generator-0.25.6 → datamodel_code_generator-0.25.8}/datamodel_code_generator/model/pydantic/base_model.py +0 -0
- {datamodel_code_generator-0.25.6 → datamodel_code_generator-0.25.8}/datamodel_code_generator/model/pydantic/custom_root_type.py +0 -0
- {datamodel_code_generator-0.25.6 → datamodel_code_generator-0.25.8}/datamodel_code_generator/model/pydantic/dataclass.py +0 -0
- {datamodel_code_generator-0.25.6 → datamodel_code_generator-0.25.8}/datamodel_code_generator/model/pydantic/imports.py +0 -0
- {datamodel_code_generator-0.25.6 → datamodel_code_generator-0.25.8}/datamodel_code_generator/model/pydantic/types.py +0 -0
- {datamodel_code_generator-0.25.6 → datamodel_code_generator-0.25.8}/datamodel_code_generator/model/pydantic_v2/imports.py +0 -0
- {datamodel_code_generator-0.25.6 → datamodel_code_generator-0.25.8}/datamodel_code_generator/model/pydantic_v2/root_model.py +0 -0
- {datamodel_code_generator-0.25.6 → datamodel_code_generator-0.25.8}/datamodel_code_generator/model/pydantic_v2/types.py +0 -0
- {datamodel_code_generator-0.25.6 → datamodel_code_generator-0.25.8}/datamodel_code_generator/model/rootmodel.py +0 -0
- {datamodel_code_generator-0.25.6 → datamodel_code_generator-0.25.8}/datamodel_code_generator/model/scalar.py +0 -0
- {datamodel_code_generator-0.25.6 → datamodel_code_generator-0.25.8}/datamodel_code_generator/model/template/Enum.jinja2 +0 -0
- {datamodel_code_generator-0.25.6 → datamodel_code_generator-0.25.8}/datamodel_code_generator/model/template/Scalar.jinja2 +0 -0
- {datamodel_code_generator-0.25.6 → datamodel_code_generator-0.25.8}/datamodel_code_generator/model/template/TypedDict.jinja2 +0 -0
- {datamodel_code_generator-0.25.6 → datamodel_code_generator-0.25.8}/datamodel_code_generator/model/template/TypedDictClass.jinja2 +0 -0
- {datamodel_code_generator-0.25.6 → datamodel_code_generator-0.25.8}/datamodel_code_generator/model/template/TypedDictFunction.jinja2 +0 -0
- {datamodel_code_generator-0.25.6 → datamodel_code_generator-0.25.8}/datamodel_code_generator/model/template/Union.jinja2 +0 -0
- {datamodel_code_generator-0.25.6 → datamodel_code_generator-0.25.8}/datamodel_code_generator/model/template/dataclass.jinja2 +0 -0
- {datamodel_code_generator-0.25.6 → datamodel_code_generator-0.25.8}/datamodel_code_generator/model/template/pydantic/BaseModel.jinja2 +0 -0
- {datamodel_code_generator-0.25.6 → datamodel_code_generator-0.25.8}/datamodel_code_generator/model/template/pydantic/BaseModel_root.jinja2 +0 -0
- {datamodel_code_generator-0.25.6 → datamodel_code_generator-0.25.8}/datamodel_code_generator/model/template/pydantic/Config.jinja2 +0 -0
- {datamodel_code_generator-0.25.6 → datamodel_code_generator-0.25.8}/datamodel_code_generator/model/template/pydantic/dataclass.jinja2 +0 -0
- {datamodel_code_generator-0.25.6 → datamodel_code_generator-0.25.8}/datamodel_code_generator/model/template/pydantic_v2/BaseModel.jinja2 +0 -0
- {datamodel_code_generator-0.25.6 → datamodel_code_generator-0.25.8}/datamodel_code_generator/model/template/pydantic_v2/ConfigDict.jinja2 +0 -0
- {datamodel_code_generator-0.25.6 → datamodel_code_generator-0.25.8}/datamodel_code_generator/model/template/pydantic_v2/RootModel.jinja2 +0 -0
- {datamodel_code_generator-0.25.6 → datamodel_code_generator-0.25.8}/datamodel_code_generator/model/template/root.jinja2 +0 -0
- {datamodel_code_generator-0.25.6 → datamodel_code_generator-0.25.8}/datamodel_code_generator/model/typed_dict.py +0 -0
- {datamodel_code_generator-0.25.6 → datamodel_code_generator-0.25.8}/datamodel_code_generator/model/types.py +0 -0
- {datamodel_code_generator-0.25.6 → datamodel_code_generator-0.25.8}/datamodel_code_generator/model/union.py +0 -0
- {datamodel_code_generator-0.25.6 → datamodel_code_generator-0.25.8}/datamodel_code_generator/parser/__init__.py +0 -0
- {datamodel_code_generator-0.25.6 → datamodel_code_generator-0.25.8}/datamodel_code_generator/py.typed +0 -0
- {datamodel_code_generator-0.25.6 → datamodel_code_generator-0.25.8}/datamodel_code_generator/reference.py +0 -0
- {datamodel_code_generator-0.25.6 → datamodel_code_generator-0.25.8}/datamodel_code_generator/util.py +0 -0

--- datamodel_code_generator-0.25.6/PKG-INFO
+++ datamodel_code_generator-0.25.8/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: datamodel-code-generator
-Version: 0.25.6
+Version: 0.25.8
 Summary: Datamodel Code Generator
 Home-page: https://github.com/koxudaxi/datamodel-code-generator
 License: MIT
@@ -61,9 +61,6 @@ and [msgspec.Struct](https://github.com/jcrist/msgspec) from an openapi file and
 ## Help
 See [documentation](https://koxudaxi.github.io/datamodel-code-generator) for more details.
 
-## Sponsors
-[](https://github.com/JetBrainsOfficial)
-
 ## Quick Installation
 
 To install `datamodel-code-generator`:
@@ -281,13 +278,52 @@ class Apis(BaseModel):
 ```
 </details>
 
+## Supported input types
+- OpenAPI 3 (YAML/JSON, [OpenAPI Data Type](https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.2.md#data-types));
+- JSON Schema ([JSON Schema Core](http://json-schema.org/draft/2019-09/json-schema-validation.html)/[JSON Schema Validation](http://json-schema.org/draft/2019-09/json-schema-validation.html));
+- JSON/YAML/CSV Data (it will be converted to JSON Schema);
+- Python dictionary (it will be converted to JSON Schema);
+- GraphQL schema ([GraphQL Schemas and Types](https://graphql.org/learn/schema/));
+
+## Supported output types
+- [pydantic](https://docs.pydantic.dev/1.10/).BaseModel;
+- [pydantic_v2](https://docs.pydantic.dev/2.0/).BaseModel;
+- [dataclasses.dataclass](https://docs.python.org/3/library/dataclasses.html);
+- [typing.TypedDict](https://docs.python.org/3/library/typing.html#typing.TypedDict);
+- [msgspec.Struct](https://github.com/jcrist/msgspec);
+- Custom type from your [jinja2](https://jinja.palletsprojects.com/en/3.1.x/) template;
+
+## Sponsors
+<table>
+  <tr>
+    <td valign="top" align="center">
+      <a href="https://github.com/JetBrainsOfficial">
+        <img src="https://avatars.githubusercontent.com/u/60931315?s=100&v=4" alt="JetBrains Logo" style="width: 100px;">
+        <p>JetBrains</p>
+      </a>
+    </td>
+    <td valign="top" align="center">
+      <a href="https://github.com/astral-sh">
+        <img src="https://avatars.githubusercontent.com/u/115962839?s=200&v=4" alt="Astral Logo" style="width: 100px;">
+        <p>Astral</p>
+      </a>
+    </td>
+    <td valign="top" align="center">
+      <a href="https://github.com/DataDog">
+        <img src="https://avatars.githubusercontent.com/u/365230?s=200&v=4" alt="Datadog, Inc. Logo" style="width: 100px;">
+        <p>Datadog, Inc.</p>
+      </a>
+    </td>
+  </tr>
+</table>
+
 ## Projects that use datamodel-code-generator
 
 These OSS projects use datamodel-code-generator to generate many models.
 See the following linked projects for real world examples and inspiration.
 
 - [airbytehq/airbyte](https://github.com/airbytehq/airbyte)
-  - *[
+  - *[Generate Python, Java/Kotlin, and Typescript protocol models](https://github.com/airbytehq/airbyte-protocol/tree/main/protocol-models/bin)*
 - [apache/iceberg](https://github.com/apache/iceberg)
   - *[Generate Python code](https://github.com/apache/iceberg/blob/d2e1094ee0cc6239d43f63ba5114272f59d605d2/open-api/README.md?plain=1#L39)*
     *[`make generate`](https://github.com/apache/iceberg/blob/d2e1094ee0cc6239d43f63ba5114272f59d605d2/open-api/Makefile#L24-L34)*
@@ -312,21 +348,6 @@ See the following linked projects for real world examples and inspiration.
 - [SeldonIO/MLServer](https://github.com/SeldonIO/MLServer)
   - *[generate-types.sh](https://github.com/SeldonIO/MLServer/blob/master/hack/generate-types.sh)*
 
-## Supported input types
-- OpenAPI 3 (YAML/JSON, [OpenAPI Data Type](https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.2.md#data-types));
-- JSON Schema ([JSON Schema Core](http://json-schema.org/draft/2019-09/json-schema-validation.html)/[JSON Schema Validation](http://json-schema.org/draft/2019-09/json-schema-validation.html));
-- JSON/YAML/CSV Data (it will be converted to JSON Schema);
-- Python dictionary (it will be converted to JSON Schema);
-- GraphQL schema ([GraphQL Schemas and Types](https://graphql.org/learn/schema/));
-
-## Supported output types
-- [pydantic](https://docs.pydantic.dev/1.10/).BaseModel;
-- [pydantic_v2](https://docs.pydantic.dev/2.0/).BaseModel;
-- [dataclasses.dataclass](https://docs.python.org/3/library/dataclasses.html);
-- [typing.TypedDict](https://docs.python.org/3/library/typing.html#typing.TypedDict);
-- [msgspec.Struct](https://github.com/jcrist/msgspec);
-- Custom type from your [jinja2](https://jinja.palletsprojects.com/en/3.1.x/) template;
-
 ## Installation
 
 To install `datamodel-code-generator`:
@@ -364,6 +385,7 @@ This method needs the [http extra option](#http-extra-option)
 ## All Command Options
 
 The `datamodel-codegen` command:
+
 ```bash
 usage:
 datamodel-codegen [options]
@@ -480,6 +502,9 @@ Model customization:
   --use-schema-description
                         Use schema description to populate class docstring
   --use-title-as-name   use titles as class names of models
+  --use-exact-imports   Import exact types instead of modules, for example:
+                        `from .foo import Bar` instead of
+                        `from . import foo` with `foo.Bar`
 
 Template customization:
   --aliases ALIASES     Alias mapping file

--- datamodel_code_generator-0.25.6/README.md
+++ datamodel_code_generator-0.25.8/README.md
@@ -16,9 +16,6 @@ and [msgspec.Struct](https://github.com/jcrist/msgspec) from an openapi file and
 ## Help
 See [documentation](https://koxudaxi.github.io/datamodel-code-generator) for more details.
 
-## Sponsors
-[](https://github.com/JetBrainsOfficial)
-
 ## Quick Installation
 
 To install `datamodel-code-generator`:
@@ -236,13 +233,52 @@ class Apis(BaseModel):
 ```
 </details>
 
+## Supported input types
+- OpenAPI 3 (YAML/JSON, [OpenAPI Data Type](https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.2.md#data-types));
+- JSON Schema ([JSON Schema Core](http://json-schema.org/draft/2019-09/json-schema-validation.html)/[JSON Schema Validation](http://json-schema.org/draft/2019-09/json-schema-validation.html));
+- JSON/YAML/CSV Data (it will be converted to JSON Schema);
+- Python dictionary (it will be converted to JSON Schema);
+- GraphQL schema ([GraphQL Schemas and Types](https://graphql.org/learn/schema/));
+
+## Supported output types
+- [pydantic](https://docs.pydantic.dev/1.10/).BaseModel;
+- [pydantic_v2](https://docs.pydantic.dev/2.0/).BaseModel;
+- [dataclasses.dataclass](https://docs.python.org/3/library/dataclasses.html);
+- [typing.TypedDict](https://docs.python.org/3/library/typing.html#typing.TypedDict);
+- [msgspec.Struct](https://github.com/jcrist/msgspec);
+- Custom type from your [jinja2](https://jinja.palletsprojects.com/en/3.1.x/) template;
+
+## Sponsors
+<table>
+  <tr>
+    <td valign="top" align="center">
+      <a href="https://github.com/JetBrainsOfficial">
+        <img src="https://avatars.githubusercontent.com/u/60931315?s=100&v=4" alt="JetBrains Logo" style="width: 100px;">
+        <p>JetBrains</p>
+      </a>
+    </td>
+    <td valign="top" align="center">
+      <a href="https://github.com/astral-sh">
+        <img src="https://avatars.githubusercontent.com/u/115962839?s=200&v=4" alt="Astral Logo" style="width: 100px;">
+        <p>Astral</p>
+      </a>
+    </td>
+    <td valign="top" align="center">
+      <a href="https://github.com/DataDog">
+        <img src="https://avatars.githubusercontent.com/u/365230?s=200&v=4" alt="Datadog, Inc. Logo" style="width: 100px;">
+        <p>Datadog, Inc.</p>
+      </a>
+    </td>
+  </tr>
+</table>
+
 ## Projects that use datamodel-code-generator
 
 These OSS projects use datamodel-code-generator to generate many models.
 See the following linked projects for real world examples and inspiration.
 
 - [airbytehq/airbyte](https://github.com/airbytehq/airbyte)
-  - *[
+  - *[Generate Python, Java/Kotlin, and Typescript protocol models](https://github.com/airbytehq/airbyte-protocol/tree/main/protocol-models/bin)*
 - [apache/iceberg](https://github.com/apache/iceberg)
   - *[Generate Python code](https://github.com/apache/iceberg/blob/d2e1094ee0cc6239d43f63ba5114272f59d605d2/open-api/README.md?plain=1#L39)*
     *[`make generate`](https://github.com/apache/iceberg/blob/d2e1094ee0cc6239d43f63ba5114272f59d605d2/open-api/Makefile#L24-L34)*
@@ -267,21 +303,6 @@ See the following linked projects for real world examples and inspiration.
 - [SeldonIO/MLServer](https://github.com/SeldonIO/MLServer)
   - *[generate-types.sh](https://github.com/SeldonIO/MLServer/blob/master/hack/generate-types.sh)*
 
-## Supported input types
-- OpenAPI 3 (YAML/JSON, [OpenAPI Data Type](https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.2.md#data-types));
-- JSON Schema ([JSON Schema Core](http://json-schema.org/draft/2019-09/json-schema-validation.html)/[JSON Schema Validation](http://json-schema.org/draft/2019-09/json-schema-validation.html));
-- JSON/YAML/CSV Data (it will be converted to JSON Schema);
-- Python dictionary (it will be converted to JSON Schema);
-- GraphQL schema ([GraphQL Schemas and Types](https://graphql.org/learn/schema/));
-
-## Supported output types
-- [pydantic](https://docs.pydantic.dev/1.10/).BaseModel;
-- [pydantic_v2](https://docs.pydantic.dev/2.0/).BaseModel;
-- [dataclasses.dataclass](https://docs.python.org/3/library/dataclasses.html);
-- [typing.TypedDict](https://docs.python.org/3/library/typing.html#typing.TypedDict);
-- [msgspec.Struct](https://github.com/jcrist/msgspec);
-- Custom type from your [jinja2](https://jinja.palletsprojects.com/en/3.1.x/) template;
-
 ## Installation
 
 To install `datamodel-code-generator`:
@@ -319,6 +340,7 @@ This method needs the [http extra option](#http-extra-option)
 ## All Command Options
 
 The `datamodel-codegen` command:
+
 ```bash
 usage:
 datamodel-codegen [options]
@@ -435,6 +457,9 @@ Model customization:
   --use-schema-description
                         Use schema description to populate class docstring
   --use-title-as-name   use titles as class names of models
+  --use-exact-imports   Import exact types instead of modules, for example:
+                        `from .foo import Bar` instead of
+                        `from . import foo` with `foo.Bar`
 
 Template customization:
   --aliases ALIASES     Alias mapping file

--- datamodel_code_generator-0.25.6/datamodel_code_generator/__init__.py
+++ datamodel_code_generator-0.25.8/datamodel_code_generator/__init__.py
@@ -29,6 +29,7 @@ from urllib.parse import ParseResult
 
 import yaml
 
+import datamodel_code_generator.pydantic_patch  # noqa: F401
 from datamodel_code_generator.format import PythonVersion
 from datamodel_code_generator.parser import DefaultPutDict, LiteralType
 from datamodel_code_generator.parser.base import Parser
@@ -301,6 +302,8 @@ def generate(
     custom_formatters_kwargs: Optional[Dict[str, Any]] = None,
     use_pendulum: bool = False,
     http_query_parameters: Optional[Sequence[Tuple[str, str]]] = None,
+    treat_dots_as_module: bool = False,
+    use_exact_imports: bool = False,
 ) -> None:
     remote_text_cache: DefaultPutDict[str, str] = DefaultPutDict()
     if isinstance(input_, str):
@@ -461,6 +464,8 @@ def generate(
         custom_formatters_kwargs=custom_formatters_kwargs,
         use_pendulum=use_pendulum,
         http_query_parameters=http_query_parameters,
+        treat_dots_as_module=treat_dots_as_module,
+        use_exact_imports=use_exact_imports,
         **kwargs,
     )
 

--- datamodel_code_generator-0.25.6/datamodel_code_generator/__main__.py
+++ datamodel_code_generator-0.25.8/datamodel_code_generator/__main__.py
@@ -312,6 +312,8 @@ class Config(BaseModel):
     custom_formatters_kwargs: Optional[TextIOBase] = None
     use_pendulum: bool = False
     http_query_parameters: Optional[Sequence[Tuple[str, str]]] = None
+    treat_dot_as_module: bool = False
+    use_exact_imports: bool = False
 
     def merge_args(self, args: Namespace) -> None:
         set_args = {
@@ -427,7 +429,7 @@ def main(args: Optional[Sequence[str]] = None) -> Exit:
         with config.custom_formatters_kwargs as data:
             try:
                 custom_formatters_kwargs = json.load(data)
-            except json.JSONDecodeError as e:
+            except json.JSONDecodeError as e:  # pragma: no cover
                 print(
                     f'Unable to load custom_formatters_kwargs mapping: {e}',
                     file=sys.stderr,
@@ -436,7 +438,7 @@
         if not isinstance(custom_formatters_kwargs, dict) or not all(
             isinstance(k, str) and isinstance(v, str)
             for k, v in custom_formatters_kwargs.items()
-        ):
+        ):  # pragma: no cover
             print(
                 'Custom formatters kwargs mapping must be a JSON string mapping (e.g. {"from": "to", ...})',
                 file=sys.stderr,
@@ -508,6 +510,8 @@
             custom_formatters_kwargs=custom_formatters_kwargs,
             use_pendulum=config.use_pendulum,
             http_query_parameters=config.http_query_parameters,
+            treat_dots_as_module=config.treat_dot_as_module,
+            use_exact_imports=config.use_exact_imports,
         )
         return Exit.OK
     except InvalidClassNameError as e:

--- datamodel_code_generator-0.25.6/datamodel_code_generator/arguments.py
+++ datamodel_code_generator-0.25.8/datamodel_code_generator/arguments.py
@@ -160,6 +160,12 @@ model_options.add_argument(
     help='target python version (default: 3.7)',
     choices=[v.value for v in PythonVersion],
 )
+model_options.add_argument(
+    '--treat-dot-as-module',
+    help='treat dotted module names as modules',
+    action='store_true',
+    default=False,
+)
 model_options.add_argument(
     '--use-schema-description',
     help='Use schema description to populate class docstring',
@@ -178,6 +184,13 @@ model_options.add_argument(
     action='store_true',
     default=False,
 )
+model_options.add_argument(
+    '--use-exact-imports',
+    help='import exact types instead of modules, for example: "from .foo import Bar" instead of '
+    '"from . import foo" with "foo.Bar"',
+    action='store_true',
+    default=False,
+)
 
 # ======================================================================================
 # Typing options for generated models
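
Both new options are also exposed through the library API (see the `generate()` signature change in `__init__.py` above). A minimal sketch of calling it with them — the schema path, output directory, and model choice below are placeholders, not taken from the diff:

```python
from pathlib import Path

from datamodel_code_generator import DataModelType, InputFileType, generate

# use_exact_imports=True emits `from .foo import Bar` instead of `from . import foo`;
# treat_dots_as_module=True keeps dotted schema names as nested module directories.
generate(
    Path('api/openapi.yaml'),
    input_file_type=InputFileType.OpenAPI,
    output=Path('models/'),
    output_model_type=DataModelType.PydanticV2BaseModel,
    use_exact_imports=True,
    treat_dots_as_module=True,
)
```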

--- datamodel_code_generator-0.25.6/datamodel_code_generator/imports.py
+++ datamodel_code_generator-0.25.8/datamodel_code_generator/imports.py
@@ -26,11 +26,12 @@ class Imports(DefaultDict[Optional[str], Set[str]]):
     def __str__(self) -> str:
         return self.dump()
 
-    def __init__(self) -> None:
+    def __init__(self, use_exact: bool = False) -> None:
         super().__init__(set)
         self.alias: DefaultDict[Optional[str], Dict[str, str]] = defaultdict(dict)
         self.counter: Dict[Tuple[Optional[str], str], int] = defaultdict(int)
         self.reference_paths: Dict[str, Import] = {}
+        self.use_exact: bool = use_exact
 
     def _set_alias(self, from_: Optional[str], imports: Set[str]) -> List[str]:
         return [

--- datamodel_code_generator-0.25.6/datamodel_code_generator/model/base.py
+++ datamodel_code_generator-0.25.8/datamodel_code_generator/model/base.py
@@ -76,7 +76,7 @@ class ConstraintsBase(_BaseModel):
             }
             constraints_class = a.__class__
         else:
-            root_type_field_constraints = {}
+            root_type_field_constraints = {}  # pragma: no cover
 
         if isinstance(b, ConstraintsBase):  # pragma: no cover
             model_field_constraints = {
@@ -86,7 +86,7 @@
         else:
             model_field_constraints = {}
 
-        if not issubclass(constraints_class, ConstraintsBase):
+        if not issubclass(constraints_class, ConstraintsBase):  # pragma: no cover
             return None
 
         return constraints_class.parse_obj(

--- datamodel_code_generator-0.25.6/datamodel_code_generator/model/msgspec.py
+++ datamodel_code_generator-0.25.8/datamodel_code_generator/model/msgspec.py
@@ -33,7 +33,7 @@ from datamodel_code_generator.types import chain_as_tuple, get_optional_type
 
 
 def _has_field_assignment(field: DataModelFieldBase) -> bool:
-    return
+    return not (
         field.required
         or (field.represented_default == 'None' and field.strip_default_none)
     )
@@ -48,7 +48,9 @@ def import_extender(cls: Type[DataModelFieldBaseT]) -> Type[DataModelFieldBaseT]
     @wraps(original_imports.fget)  # type: ignore
     def new_imports(self: DataModelFieldBaseT) -> Tuple[Import, ...]:
         extra_imports = []
-
+        field = self.field
+        # TODO: Improve field detection
+        if field and field.startswith('field('):
             extra_imports.append(IMPORT_MSGSPEC_FIELD)
         if self.field and 'lambda: convert' in self.field:
             extra_imports.append(IMPORT_MSGSPEC_CONVERT)
@@ -177,6 +179,8 @@ class DataModelField(DataModelFieldBase):
 
         if self.default != UNDEFINED and self.default is not None:
             data['default'] = self.default
+        elif not self.required:
+            data['default'] = None
 
         if self.required:
             data = {
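
For illustration, the net effect of the default handling above is that non-required members of a generated `msgspec.Struct` receive an explicit default. A rough sketch of what such output looks like — the struct and field names are made up, not taken from the diff:

```python
from typing import Optional

import msgspec


class User(msgspec.Struct):
    name: str
    # optional members now get an explicit default instead of being left without one
    nickname: Optional[str] = None
```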

--- datamodel_code_generator-0.25.6/datamodel_code_generator/model/pydantic_v2/__init__.py
+++ datamodel_code_generator-0.25.8/datamodel_code_generator/model/pydantic_v2/__init__.py
@@ -18,9 +18,11 @@ class ConfigDict(_BaseModel):
     title: Optional[str] = None
     populate_by_name: Optional[bool] = None
     allow_extra_fields: Optional[bool] = None
+    from_attributes: Optional[bool] = None
     frozen: Optional[bool] = None
     arbitrary_types_allowed: Optional[bool] = None
     protected_namespaces: Optional[Tuple[str, ...]] = None
+    regex_engine: Optional[str] = None
 
 
 __all__ = [
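
Both new keys mirror real pydantic v2 `ConfigDict` settings. A minimal sketch of a model config using them, assuming pydantic>=2.4 — the class name and pattern are illustrative, not taken from the diff:

```python
from pydantic import BaseModel, ConfigDict, Field


class Account(BaseModel):
    # Lookbehind patterns are not supported by pydantic's default Rust regex engine,
    # so the generator can switch the model to Python's re module instead.
    model_config = ConfigDict(from_attributes=True, regex_engine='python-re')

    handle: str = Field(pattern=r'(?<=@)[a-z0-9_]+')
```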

--- datamodel_code_generator-0.25.6/datamodel_code_generator/model/pydantic_v2/base_model.py
+++ datamodel_code_generator-0.25.8/datamodel_code_generator/model/pydantic_v2/base_model.py
@@ -1,3 +1,4 @@
+import re
 from pathlib import Path
 from typing import (
     TYPE_CHECKING,
@@ -32,7 +33,7 @@ if TYPE_CHECKING:
 else:
     try:
         from typing import Literal
-    except ImportError:
+    except ImportError:  # pragma: no cover
         from typing_extensions import Literal
 
 
@@ -105,7 +106,7 @@ class DataModelField(DataModelFieldV1):
 
     @field_validator('extras')
     def validate_extras(cls, values: Any) -> Dict[str, Any]:
-        if not isinstance(values, dict):
+        if not isinstance(values, dict):  # pragma: no cover
            return values
         if 'examples' in values:
             return values
@@ -145,7 +146,7 @@
         self, field_arguments: List[str]
     ) -> List[str]:
         if not self.required or self.const:
-            if self.use_default_kwarg:
+            if self.use_default_kwarg:  # pragma: no cover
                 return [
                     f'default={repr(self.default)}',
                     *field_arguments,
@@ -214,10 +215,20 @@ class BaseModel(BaseModelBase):
             else self.extra_template_data[from_]
         )
         for data_type in self.all_data_types:
-            if data_type.is_custom_type:
+            if data_type.is_custom_type:  # pragma: no cover
                 config_parameters['arbitrary_types_allowed'] = True
                 break
 
+        for field in self.fields:
+            # Check if a regex pattern uses lookarounds.
+            # Depending on the generation configuration, the pattern may end up in two different places.
+            pattern = (
+                isinstance(field.constraints, Constraints) and field.constraints.pattern
+            ) or (field.data_type.kwargs or {}).get('pattern')
+            if pattern and re.search(r'\(\?<?[=!]', pattern):
+                config_parameters['regex_engine'] = '"python-re"'
+                break
+
         if isinstance(self.extra_template_data.get('config'), dict):
             for key, value in self.extra_template_data['config'].items():
                 config_parameters[key] = value
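
The lookaround check added above can be exercised on its own. A small sketch using the same detection expression as the generator — the sample patterns are illustrative:

```python
import re

# Matches the lookaround openers '(?=', '(?!', '(?<=', '(?<!'
LOOKAROUND = re.compile(r'\(\?<?[=!]')

assert LOOKAROUND.search(r'^(?!forbidden).*$')       # negative lookahead -> python-re engine
assert LOOKAROUND.search(r'(?<=prefix-)\d+')         # lookbehind -> python-re engine
assert not LOOKAROUND.search(r'^[a-z]+(-[a-z]+)*$')  # plain pattern -> default engine
```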

--- datamodel_code_generator-0.25.6/datamodel_code_generator/model/template/msgspec.jinja2
+++ datamodel_code_generator-0.25.8/datamodel_code_generator/model/template/msgspec.jinja2
@@ -18,12 +18,14 @@ class {{ class_name }}:
 {%- if not field.annotated and field.field %}
     {{ field.name }}: {{ field.type_hint }} = {{ field.field }}
 {%- else %}
-    {%- if field.annotated %}
+    {%- if field.annotated and not field.field %}
     {{ field.name }}: {{ field.annotated }}
+    {%- elif field.annotated and field.field %}
+    {{ field.name }}: {{ field.annotated }} = {{ field.field }}
     {%- else %}
     {{ field.name }}: {{ field.type_hint }}
     {%- endif %}
-    {%- if not field.required or field.data_type.is_optional or field.nullable
+    {%- if not field.field and (not field.required or field.data_type.is_optional or field.nullable)
     %} = {{ field.represented_default }}
     {%- endif -%}
 {%- endif %}

--- datamodel_code_generator-0.25.6/datamodel_code_generator/parser/base.py
+++ datamodel_code_generator-0.25.8/datamodel_code_generator/parser/base.py
@@ -238,6 +238,14 @@ def relative(current_module: str, reference: str) -> Tuple[str, str]:
     return left, right
 
 
+def exact_import(from_: str, import_: str, short_name: str) -> Tuple[str, str]:
+    if from_ == '.':
+        # Prevents "from . import foo" becoming "from ..foo import Foo"
+        # when our imported module has the same parent
+        return f'.{import_}', short_name
+    return f'{from_}.{import_}', short_name
+
+
 @runtime_checkable
 class Child(Protocol):
     @property
@@ -295,7 +303,7 @@ def _copy_data_types(data_types: List[DataType]) -> List[DataType]:
             copied_data_types.append(
                 data_type_.__class__(reference=data_type_.reference)
             )
-        elif data_type_.data_types:
+        elif data_type_.data_types:  # pragma: no cover
             copied_data_type = data_type_.copy()
             copied_data_type.data_types = _copy_data_types(data_type_.data_types)
             copied_data_types.append(copied_data_type)
@@ -392,6 +400,8 @@ class Parser(ABC):
         custom_formatters_kwargs: Optional[Dict[str, Any]] = None,
         use_pendulum: bool = False,
         http_query_parameters: Optional[Sequence[Tuple[str, str]]] = None,
+        treat_dots_as_module: bool = False,
+        use_exact_imports: bool = False,
     ) -> None:
         self.data_type_manager: DataTypeManager = data_type_manager_type(
             python_version=target_python_version,
@@ -405,7 +415,8 @@
         self.data_model_root_type: Type[DataModel] = data_model_root_type
         self.data_model_field_type: Type[DataModelFieldBase] = data_model_field_type
 
-        self.imports: Imports = Imports()
+        self.imports: Imports = Imports(use_exact_imports)
+        self.use_exact_imports: bool = use_exact_imports
         self._append_additional_imports(additional_imports=additional_imports)
 
         self.base_class: Optional[str] = base_class
@@ -514,6 +525,7 @@
         self.known_third_party = known_third_party
         self.custom_formatter = custom_formatters
         self.custom_formatters_kwargs = custom_formatters_kwargs
+        self.treat_dots_as_module = treat_dots_as_module
 
     @property
     def iter_source(self) -> Iterator[Source]:
@@ -666,9 +678,8 @@
                 model.class_name = duplicate_name
                 model_names[duplicate_name] = model
 
-    @classmethod
     def __change_from_import(
-
+        self,
         models: List[DataModel],
         imports: Imports,
         scoped_model_resolver: ModelResolver,
@@ -700,6 +711,18 @@
                 from_, import_ = full_path = relative(
                     model.module_name, data_type.full_name
                 )
+                if imports.use_exact:  # pragma: no cover
+                    from_, import_ = exact_import(
+                        from_, import_, data_type.reference.short_name
+                    )
+                import_ = import_.replace('-', '_')
+                if (
+                    len(model.module_path) > 1
+                    and model.module_path[-1].count('.') > 0
+                    and not self.treat_dots_as_module
+                ):
+                    rel_path_depth = model.module_path[-1].count('.')
+                    from_ = from_[rel_path_depth:]
 
                 alias = scoped_model_resolver.add(full_path, import_).name
 
@@ -707,7 +730,7 @@
                 if from_ and import_ and alias != name:
                     data_type.alias = (
                         alias
-                        if
+                        if data_type.reference.short_name == import_
                         else f'{alias}.{name}'
                     )
 
@@ -778,8 +801,18 @@
                         discriminator_model.path.split('#/')[-1]
                         != path.split('#/')[-1]
                     ):
-
-
+                        if (
+                            path.startswith('#/')
+                            or discriminator_model.path[:-1]
+                            != path.split('/')[-1]
+                        ):
+                            t_path = path[str(path).find('/') + 1 :]
+                            t_disc = discriminator_model.path[
+                                : str(discriminator_model.path).find('#')
+                            ].lstrip('../')
+                            t_disc_2 = '/'.join(t_disc.split('/')[1:])
+                            if t_path != t_disc and t_path != t_disc_2:
+                                continue
                     type_names.append(name)
             else:
                 type_names = [discriminator_model.path.split('/')[-1]]
@@ -822,11 +855,16 @@
                         required=True,
                     )
                 )
-
+            literal = (
                 IMPORT_LITERAL
                 if self.target_python_version.has_literal_type
                 else IMPORT_LITERAL_BACKPORT
             )
+            has_imported_literal = any(
+                literal == import_ for import_ in imports
+            )
+            if has_imported_literal:  # pragma: no cover
+                imports.append(literal)
 
     @classmethod
     def _create_set_from_list(cls, data_type: DataType) -> Optional[DataType]:
@@ -906,6 +944,7 @@
                     name=model.name,
                     path=model.reference.path + '/reuse',
                 ),
+                custom_template_dir=model._custom_template_dir,
             )
             if cached_model_reference.path in require_update_action_models:
                 require_update_action_models.append(inherited_model.path)
@@ -947,7 +986,7 @@
                     if d.is_dict or d.is_union
                 )
             ):
-                continue
+                continue  # pragma: no cover
 
             # set copied data_type
             copied_data_type = root_type_field.data_type.copy()
@@ -973,12 +1012,15 @@
                     root_type_field.constraints, model_field.constraints
                 )
                 if isinstance(
-                    root_type_field,
-
+                    root_type_field,
+                    pydantic_model.DataModelField,
+                ) and not model_field.extras.get('discriminator'):
                     discriminator = root_type_field.extras.get('discriminator')
-                    if discriminator:
+                    if discriminator:
                         model_field.extras['discriminator'] = discriminator
-                data_type.parent.data_types.remove(
+                data_type.parent.data_types.remove(
+                    data_type
+                )  # pragma: no cover
                 data_type.parent.data_types.append(copied_data_type)
 
             elif isinstance(data_type.parent, DataType):
@@ -1135,6 +1177,32 @@
             if model_field.nullable is not True:  # pragma: no cover
                 model_field.nullable = False
 
+    @classmethod
+    def __postprocess_result_modules(cls, results):
+        def process(input_tuple) -> Tuple[str, ...]:
+            r = []
+            for item in input_tuple:
+                p = item.split('.')
+                if len(p) > 1:
+                    r.extend(p[:-1])
+                    r.append(p[-1])
+                else:
+                    r.append(item)
+
+            r = r[:-2] + [f'{r[-2]}.{r[-1]}']
+            return tuple(r)
+
+        results = {process(k): v for k, v in results.items()}
+
+        init_result = [v for k, v in results.items() if k[-1] == '__init__.py'][0]
+        folders = {t[:-1] if t[-1].endswith('.py') else t for t in results.keys()}
+        for folder in folders:
+            for i in range(len(folder)):
+                subfolder = folder[: i + 1]
+                init_file = subfolder + ('__init__.py',)
+                results.update({init_file: init_result})
+        return results
+
     def __change_imported_model_name(
         self,
         models: List[DataModel],
@@ -1238,7 +1306,7 @@
         processed_models: List[Processed] = []
 
         for module, models in module_models:
-            imports = module_to_import[module] = Imports()
+            imports = module_to_import[module] = Imports(self.use_exact_imports)
             init = False
             if module:
                 parent = (*module[:-1], '__init__.py')
@@ -1249,6 +1317,7 @@
                     init = True
                 else:
                     module = (*module[:-1], f'{module[-1]}.py')
+                    module = tuple(part.replace('-', '_') for part in module)
             else:
                 module = ('__init__.py',)
 
@@ -1270,6 +1339,10 @@
                 Processed(module, models, init, imports, scoped_model_resolver)
             )
 
+        for processed_model in processed_models:
+            for model in processed_model.models:
+                processed_model.imports.append(model.imports)
+
         for unused_model in unused_models:
             module, models = model_to_module_models[unused_model]
             if unused_model in models:  # pragma: no cover
@@ -1277,6 +1350,18 @@
                 imports.remove(unused_model.imports)
                 models.remove(unused_model)
 
+        for processed_model in processed_models:
+            # postprocess imports to remove unused imports.
+            model_code = str('\n'.join([str(m) for m in processed_model.models]))
+            unused_imports = [
+                (from_, import_)
+                for from_, imports_ in processed_model.imports.items()
+                for import_ in imports_
+                if import_ not in model_code
+            ]
+            for from_, import_ in unused_imports:
+                processed_model.imports.remove(Import(from_=from_, import_=import_))
+
         for module, models, init, imports, scoped_model_resolver in processed_models:
             # process after removing unused models
             self.__change_imported_model_name(models, imports, scoped_model_resolver)
@@ -1311,4 +1396,20 @@
         if [*results] == [('__init__.py',)]:
             return results[('__init__.py',)].body
 
+        results = {tuple(i.replace('-', '_') for i in k): v for k, v in results.items()}
+        results = (
+            self.__postprocess_result_modules(results)
+            if self.treat_dots_as_module
+            else {
+                tuple(
+                    (
+                        part[: part.rfind('.')].replace('.', '_')
+                        + part[part.rfind('.') :]
+                    )
+                    for part in k
+                ): v
+                for k, v in results.items()
+            }
+        )
+
         return results
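
The behaviour of the new `exact_import` helper is easiest to see with concrete values. A small sketch against the function defined above:

```python
from datamodel_code_generator.parser.base import exact_import

# "from . import foo" / foo.Bar  ->  "from .foo import Bar"
assert exact_import('.', 'foo', 'Bar') == ('.foo', 'Bar')

# "from .models import foo" / foo.Bar  ->  "from .models.foo import Bar"
assert exact_import('.models', 'foo', 'Bar') == ('.models.foo', 'Bar')
```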

--- datamodel_code_generator-0.25.6/datamodel_code_generator/parser/graphql.py
+++ datamodel_code_generator-0.25.8/datamodel_code_generator/parser/graphql.py
@@ -37,11 +37,7 @@ from datamodel_code_generator.parser.base import (
     escape_characters,
 )
 from datamodel_code_generator.reference import ModelType, Reference
-from datamodel_code_generator.types import (
-    DataTypeManager,
-    StrictTypes,
-    Types,
-)
+from datamodel_code_generator.types import DataTypeManager, StrictTypes, Types
 
 try:
     import graphql
@@ -158,6 +154,8 @@ class GraphQLParser(Parser):
         custom_formatters_kwargs: Optional[Dict[str, Any]] = None,
         use_pendulum: bool = False,
         http_query_parameters: Optional[Sequence[Tuple[str, str]]] = None,
+        treat_dots_as_module: bool = False,
+        use_exact_imports: bool = False,
     ) -> None:
         super().__init__(
             source=source,
@@ -225,18 +223,22 @@
             custom_formatters_kwargs=custom_formatters_kwargs,
             use_pendulum=use_pendulum,
             http_query_parameters=http_query_parameters,
+            treat_dots_as_module=treat_dots_as_module,
+            use_exact_imports=use_exact_imports,
         )
 
         self.data_model_scalar_type = data_model_scalar_type
         self.data_model_union_type = data_model_union_type
+        self.use_standard_collections = use_standard_collections
+        self.use_union_operator = use_union_operator
 
     def _get_context_source_path_parts(self) -> Iterator[Tuple[Source, List[str]]]:
         # TODO (denisart): Temporarily this method duplicates
         # the method `datamodel_code_generator.parser.jsonschema.JsonSchemaParser._get_context_source_path_parts`.
 
-        if isinstance(self.source, list) or (
+        if isinstance(self.source, list) or (  # pragma: no cover
             isinstance(self.source, Path) and self.source.is_dir()
-        ):
+        ):  # pragma: no cover
             self.current_source_path = Path()
             self.model_resolver.after_load_files = {
                 self.base_path.joinpath(s.path).resolve().as_posix()
@@ -244,11 +246,11 @@
             }
 
         for source in self.iter_source:
-            if isinstance(self.source, ParseResult):
+            if isinstance(self.source, ParseResult):  # pragma: no cover
                 path_parts = self.get_url_path_parts(self.source)
             else:
                 path_parts = list(source.path.parts)
-            if self.current_source_path is not None:
+            if self.current_source_path is not None:  # pragma: no cover
                 self.current_source_path = source.path
             with self.model_resolver.current_base_path_context(
                 source.path.parent
@@ -265,7 +267,7 @@
 
             resolved_type = graphql_resolver.kind(type_, None)
 
-            if resolved_type in self.support_graphql_types:
+            if resolved_type in self.support_graphql_types:  # pragma: no cover
                 self.all_graphql_objects[type_.name] = type_
                 # TODO: need a special method for each graph type
                 self.references[type_.name] = Reference(
@@ -279,8 +281,13 @@
     def _typename_field(self, name: str) -> DataModelFieldBase:
         return self.data_model_field_type(
             name='typename__',
-            data_type=DataType(
+            data_type=DataType(
+                literals=[name],
+                use_union_operator=self.use_union_operator,
+                use_standard_collections=self.use_standard_collections,
+            ),
             default=name,
+            use_annotated=self.use_annotated,
             required=False,
             alias='__typename',
             use_one_literal_as_default=True,
@@ -344,7 +351,11 @@
         alias: str,
         field: Union[graphql.GraphQLField, graphql.GraphQLInputField],
     ) -> DataModelFieldBase:
-        final_data_type = DataType(
+        final_data_type = DataType(
+            is_optional=True,
+            use_union_operator=self.use_union_operator,
+            use_standard_collections=self.use_standard_collections,
+        )
         data_type = final_data_type
         obj = field.type
 
@@ -352,11 +363,15 @@
         if graphql.is_list_type(obj):
             data_type.is_list = True
 
-            new_data_type = DataType(
+            new_data_type = DataType(
+                is_optional=True,
+                use_union_operator=self.use_union_operator,
+                use_standard_collections=self.use_standard_collections,
+            )
             data_type.data_types = [new_data_type]
 
             data_type = new_data_type
-        elif graphql.is_non_null_type(obj):
+        elif graphql.is_non_null_type(obj):  # pragma: no cover
             data_type.is_optional = False
 
             obj = obj.of_type
@@ -368,10 +383,10 @@
         )
         extras = {}
 
-        if hasattr(field, 'default_value'):
-            if field.default_value == graphql.pyutils.Undefined:
+        if hasattr(field, 'default_value'):  # pragma: no cover
+            if field.default_value == graphql.pyutils.Undefined:  # pragma: no cover
                 default = None
-            else:
+            else:  # pragma: no cover
                 default = field.default_value
         else:
             if required is False:
@@ -421,7 +436,7 @@
             fields.append(self._typename_field(obj.name))
 
         base_classes = []
-        if hasattr(obj, 'interfaces'):
+        if hasattr(obj, 'interfaces'):  # pragma: no cover
             base_classes = [self.references[i.name] for i in obj.interfaces]
 
         data_model_type = self.data_model_type(
@@ -447,7 +462,7 @@
     def parse_input_object(
         self, input_graphql_object: graphql.GraphQLInputObjectType
     ) -> None:
-        self.parse_object_like(input_graphql_object)
+        self.parse_object_like(input_graphql_object)  # pragma: no cover
 
     def parse_union(self, union_object: graphql.GraphQLUnionType) -> None:
         fields = []

--- datamodel_code_generator-0.25.6/datamodel_code_generator/parser/jsonschema.py
+++ datamodel_code_generator-0.25.8/datamodel_code_generator/parser/jsonschema.py
@@ -440,6 +440,8 @@ class JsonSchemaParser(Parser):
         custom_formatters_kwargs: Optional[Dict[str, Any]] = None,
         use_pendulum: bool = False,
         http_query_parameters: Optional[Sequence[Tuple[str, str]]] = None,
+        treat_dots_as_module: bool = False,
+        use_exact_imports: bool = False,
     ) -> None:
         super().__init__(
             source=source,
@@ -507,6 +509,8 @@
             custom_formatters_kwargs=custom_formatters_kwargs,
             use_pendulum=use_pendulum,
             http_query_parameters=http_query_parameters,
+            treat_dots_as_module=treat_dots_as_module,
+            use_exact_imports=use_exact_imports,
         )
 
         self.remote_object_cache: DefaultPutDict[str, Dict[str, Any]] = DefaultPutDict()
@@ -758,10 +762,10 @@
             return self.data_type(reference=base_classes[0])
         if required:
             for field in fields:
-                if self.force_optional_for_required_fields or (
+                if self.force_optional_for_required_fields or (  # pragma: no cover
                     self.apply_default_values_for_required_fields and field.has_default
                 ):
-                    continue
+                    continue  # pragma: no cover
                 if (field.original_name or field.name) in required:
                     field.required = True
         if obj.required:
@@ -1310,7 +1314,11 @@
         elif obj.custom_type_path:
             data_type = self.data_type_manager.get_data_type_from_full_path(
                 obj.custom_type_path, is_custom_type=True
-            )
+            )  # pragma: no cover
+        elif obj.is_array:
+            data_type = self.parse_array_fields(
+                name, obj, get_special_path('array', path)
+            ).data_type  # pragma: no cover
         elif obj.anyOf or obj.oneOf:
             reference = self.model_resolver.add(
                 path, name, loaded=True, class_name=True
@@ -1324,9 +1332,9 @@
                 name, obj, get_special_path('oneOf', path)
             )
 
-        if len(data_types) > 1:
+        if len(data_types) > 1:  # pragma: no cover
             data_type = self.data_type(data_types=data_types)
-        elif not data_types:
+        elif not data_types:  # pragma: no cover
             return EmptyDataType()
         else:  # pragma: no cover
             data_type = data_types[0]
@@ -1655,7 +1663,7 @@
         elif obj.oneOf or obj.anyOf:
             data_type = self.parse_root_type(name, obj, path)
             if isinstance(data_type, EmptyDataType) and obj.properties:
-                self.parse_object(name, obj, path)
+                self.parse_object(name, obj, path)  # pragma: no cover
         elif obj.properties:
             self.parse_object(name, obj, path)
         elif obj.patternProperties:

--- datamodel_code_generator-0.25.6/datamodel_code_generator/parser/openapi.py
+++ datamodel_code_generator-0.25.8/datamodel_code_generator/parser/openapi.py
@@ -222,6 +222,8 @@ class OpenAPIParser(JsonSchemaParser):
         custom_formatters_kwargs: Optional[Dict[str, Any]] = None,
         use_pendulum: bool = False,
         http_query_parameters: Optional[Sequence[Tuple[str, str]]] = None,
+        treat_dots_as_module: bool = False,
+        use_exact_imports: bool = False,
     ):
         super().__init__(
             source=source,
@@ -289,6 +291,8 @@
             custom_formatters_kwargs=custom_formatters_kwargs,
             use_pendulum=use_pendulum,
             http_query_parameters=http_query_parameters,
+            treat_dots_as_module=treat_dots_as_module,
+            use_exact_imports=use_exact_imports,
         )
         self.open_api_scopes: List[OpenAPIScope] = openapi_scopes or [
             OpenAPIScope.Schemas

--- /dev/null
+++ datamodel_code_generator-0.25.8/datamodel_code_generator/pydantic_patch.py
@@ -0,0 +1,21 @@
+import sys
+
+import pydantic.typing
+
+
+def patched_evaluate_forwardref(
+    forward_ref, globalns, localns=None
+):  # pragma: no cover
+    try:
+        return forward_ref._evaluate(
+            globalns, localns or None, set()
+        )  # pragma: no cover
+    except TypeError:
+        # Fallback for Python 3.12 compatibility
+        return forward_ref._evaluate(
+            globalns, localns or None, set(), recursive_guard=set()
+        )
+
+
+if '3.12' in sys.version:  # pragma: no cover
+    pydantic.typing.evaluate_forwardref = patched_evaluate_forwardref
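
The patch is applied implicitly when the package is imported (see the new `import datamodel_code_generator.pydantic_patch` line added to `__init__.py` above). A rough way to confirm it on an affected interpreter, assuming a pydantic installation that exposes `pydantic.typing.evaluate_forwardref`:

```python
import sys

import pydantic.typing

import datamodel_code_generator  # importing the package installs the patch

if '3.12' in sys.version:
    # On Python 3.12 the original helper should have been swapped for the shim.
    assert pydantic.typing.evaluate_forwardref.__name__ == 'patched_evaluate_forwardref'
```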

--- datamodel_code_generator-0.25.6/datamodel_code_generator/types.py
+++ datamodel_code_generator-0.25.8/datamodel_code_generator/types.py
@@ -25,12 +25,7 @@ from typing import (
 
 import pydantic
 from packaging import version
-from pydantic import (
-    StrictBool,
-    StrictInt,
-    StrictStr,
-    create_model,
-)
+from pydantic import StrictBool, StrictInt, StrictStr, create_model
 
 from datamodel_code_generator.format import PythonVersion
 from datamodel_code_generator.imports import (

--- /dev/null
+++ datamodel_code_generator-0.25.8/datamodel_code_generator/version.py
@@ -0,0 +1 @@
+version: str = '0.25.8'

--- datamodel_code_generator-0.25.6/pyproject.toml
+++ datamodel_code_generator-0.25.8/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "datamodel-code-generator"
-version = "0.25.6"
+version = "0.25.8"
 description = "Datamodel Code Generator"
 authors = ["Koudai Aono <koxudaxi@gmail.com>"]
 readme = "README.md"
@@ -78,7 +78,7 @@ types-setuptools = ">=67.6.0.5,<70.0.0.0"
 pydantic = "*"
 httpx = ">=0.24.1"
 PySnooper = "*"
-ruff = ">=0.0.290,<0.4.
+ruff = ">=0.0.290,<0.4.9"
 ruff-lsp = ">=0.0.39,<0.0.41"
 pre-commit = "*"
 pytest-xdist = "^3.3.1"

--- datamodel_code_generator-0.25.6/datamodel_code_generator/version.py
+++ /dev/null
@@ -1 +0,0 @@
-version: str = '0.25.6'

All remaining files listed above with +0 -0 are unchanged between 0.25.6 and 0.25.8; only their containing directory was renamed for the new version.