datamodel-code-generator 0.19.0__tar.gz → 0.20.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {datamodel_code_generator-0.19.0 → datamodel_code_generator-0.20.0}/PKG-INFO +11 -51
- {datamodel_code_generator-0.19.0 → datamodel_code_generator-0.20.0}/README.md +9 -43
- {datamodel_code_generator-0.19.0 → datamodel_code_generator-0.20.0}/datamodel_code_generator/__init__.py +10 -4
- {datamodel_code_generator-0.19.0 → datamodel_code_generator-0.20.0}/datamodel_code_generator/__main__.py +23 -1
- {datamodel_code_generator-0.19.0 → datamodel_code_generator-0.20.0}/datamodel_code_generator/format.py +29 -3
- {datamodel_code_generator-0.19.0 → datamodel_code_generator-0.20.0}/datamodel_code_generator/model/__init__.py +17 -3
- datamodel_code_generator-0.20.0/datamodel_code_generator/model/improts.py +8 -0
- datamodel_code_generator-0.20.0/datamodel_code_generator/model/template/TypedDict.jinja2 +5 -0
- datamodel_code_generator-0.20.0/datamodel_code_generator/model/template/TypedDictClass.jinja2 +17 -0
- datamodel_code_generator-0.20.0/datamodel_code_generator/model/template/TypedDictFunction.jinja2 +16 -0
- datamodel_code_generator-0.20.0/datamodel_code_generator/model/typed_dict.py +151 -0
- {datamodel_code_generator-0.19.0 → datamodel_code_generator-0.20.0}/datamodel_code_generator/parser/base.py +1 -6
- {datamodel_code_generator-0.19.0 → datamodel_code_generator-0.20.0}/datamodel_code_generator/parser/jsonschema.py +9 -6
- {datamodel_code_generator-0.19.0 → datamodel_code_generator-0.20.0}/datamodel_code_generator/parser/openapi.py +3 -0
- {datamodel_code_generator-0.19.0 → datamodel_code_generator-0.20.0}/datamodel_code_generator/types.py +3 -0
- datamodel_code_generator-0.20.0/datamodel_code_generator/version.py +1 -0
- {datamodel_code_generator-0.19.0 → datamodel_code_generator-0.20.0}/pyproject.toml +4 -6
- datamodel_code_generator-0.19.0/datamodel_code_generator/model/improts.py +0 -4
- datamodel_code_generator-0.19.0/datamodel_code_generator/version.py +0 -1
- {datamodel_code_generator-0.19.0 → datamodel_code_generator-0.20.0}/LICENSE +0 -0
- {datamodel_code_generator-0.19.0 → datamodel_code_generator-0.20.0}/datamodel_code_generator/http.py +0 -0
- {datamodel_code_generator-0.19.0 → datamodel_code_generator-0.20.0}/datamodel_code_generator/imports.py +0 -0
- {datamodel_code_generator-0.19.0 → datamodel_code_generator-0.20.0}/datamodel_code_generator/model/base.py +0 -0
- {datamodel_code_generator-0.19.0 → datamodel_code_generator-0.20.0}/datamodel_code_generator/model/dataclass.py +0 -0
- {datamodel_code_generator-0.19.0 → datamodel_code_generator-0.20.0}/datamodel_code_generator/model/enum.py +0 -0
- {datamodel_code_generator-0.19.0 → datamodel_code_generator-0.20.0}/datamodel_code_generator/model/pydantic/__init__.py +0 -0
- {datamodel_code_generator-0.19.0 → datamodel_code_generator-0.20.0}/datamodel_code_generator/model/pydantic/base_model.py +0 -0
- {datamodel_code_generator-0.19.0 → datamodel_code_generator-0.20.0}/datamodel_code_generator/model/pydantic/custom_root_type.py +0 -0
- {datamodel_code_generator-0.19.0 → datamodel_code_generator-0.20.0}/datamodel_code_generator/model/pydantic/dataclass.py +0 -0
- {datamodel_code_generator-0.19.0 → datamodel_code_generator-0.20.0}/datamodel_code_generator/model/pydantic/imports.py +0 -0
- {datamodel_code_generator-0.19.0 → datamodel_code_generator-0.20.0}/datamodel_code_generator/model/pydantic/types.py +0 -0
- {datamodel_code_generator-0.19.0 → datamodel_code_generator-0.20.0}/datamodel_code_generator/model/rootmodel.py +0 -0
- {datamodel_code_generator-0.19.0 → datamodel_code_generator-0.20.0}/datamodel_code_generator/model/template/Enum.jinja2 +0 -0
- {datamodel_code_generator-0.19.0 → datamodel_code_generator-0.20.0}/datamodel_code_generator/model/template/dataclass.jinja2 +0 -0
- {datamodel_code_generator-0.19.0 → datamodel_code_generator-0.20.0}/datamodel_code_generator/model/template/pydantic/BaseModel.jinja2 +0 -0
- {datamodel_code_generator-0.19.0 → datamodel_code_generator-0.20.0}/datamodel_code_generator/model/template/pydantic/BaseModel_root.jinja2 +0 -0
- {datamodel_code_generator-0.19.0 → datamodel_code_generator-0.20.0}/datamodel_code_generator/model/template/pydantic/Config.jinja2 +0 -0
- {datamodel_code_generator-0.19.0 → datamodel_code_generator-0.20.0}/datamodel_code_generator/model/template/pydantic/dataclass.jinja2 +0 -0
- {datamodel_code_generator-0.19.0 → datamodel_code_generator-0.20.0}/datamodel_code_generator/model/template/root.jinja2 +0 -0
- {datamodel_code_generator-0.19.0 → datamodel_code_generator-0.20.0}/datamodel_code_generator/model/types.py +0 -0
- {datamodel_code_generator-0.19.0 → datamodel_code_generator-0.20.0}/datamodel_code_generator/parser/__init__.py +0 -0
- {datamodel_code_generator-0.19.0 → datamodel_code_generator-0.20.0}/datamodel_code_generator/py.typed +0 -0
- {datamodel_code_generator-0.19.0 → datamodel_code_generator-0.20.0}/datamodel_code_generator/reference.py +0 -0
{datamodel_code_generator-0.19.0 → datamodel_code_generator-0.20.0}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: datamodel-code-generator
-Version: 0.19.0
+Version: 0.20.0
 Summary: Datamodel Code Generator
 Home-page: https://github.com/koxudaxi/datamodel-code-generator
 License: MIT
@@ -16,12 +16,6 @@ Classifier: Programming Language :: Python :: 3.8
 Classifier: Programming Language :: Python :: 3.9
 Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
-Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3.10
-Classifier: Programming Language :: Python :: 3.11
-Classifier: Programming Language :: Python :: 3.7
-Classifier: Programming Language :: Python :: 3.8
-Classifier: Programming Language :: Python :: 3.9
 Classifier: Programming Language :: Python :: Implementation :: CPython
 Provides-Extra: http
 Requires-Dist: PySnooper (>=0.4.1,<2.0.0)
@@ -32,7 +26,7 @@ Requires-Dist: httpx ; extra == "http"
 Requires-Dist: inflect (>=4.1.0,<6.0)
 Requires-Dist: isort (>=4.3.21,<6.0)
 Requires-Dist: jinja2 (>=2.10.1,<4.0)
-Requires-Dist: openapi-spec-validator (>=0.2.8,<=0.5.
+Requires-Dist: openapi-spec-validator (>=0.2.8,<=0.5.2)
 Requires-Dist: packaging
 Requires-Dist: prance (>=0.18.2,<1.0)
 Requires-Dist: pydantic[email] (>=1.10.0,<2.0.0) ; python_version >= "3.11" and python_version < "4.0"
@@ -44,7 +38,7 @@ Description-Content-Type: text/markdown
 
 # datamodel-code-generator
 
-This code generator creates [pydantic](https://docs.pydantic.dev/) model
+This code generator creates [pydantic](https://docs.pydantic.dev/) model, [dataclasses.dataclass](https://docs.python.org/3/library/dataclasses.html) and [typing.TypedDict](https://docs.python.org/3/library/typing.html#typing.TypedDict) from an openapi file and others.
 
 [](https://github.com/koxudaxi/datamodel-code-generator/actions?query=workflow%3ATest)
 [](https://pypi.python.org/pypi/datamodel-code-generator)
@@ -69,7 +63,7 @@ To install `datamodel-code-generator`:
 $ pip install datamodel-code-generator
 ```
 
-## Simple
+## Simple Usage
 You can generate models from a local file.
 ```bash
 $ datamodel-codegen --input api.yaml --output model.py
@@ -279,14 +273,14 @@ class Apis(BaseModel):
 ```
 </details>
 
-##
-These OSS use datamodel-code-generator to generate many models.
+## Projects that use datamodel-code-generator
+These OSS projects use datamodel-code-generator to generate many models. See the following linked projects for real world examples and inspiration.
 - [Netflix/consoleme](https://github.com/Netflix/consoleme)
   - *[How do I generate models from the Swagger specification?](https://github.com/Netflix/consoleme/blob/master/docs/gitbook/faq.md#how-do-i-generate-models-from-the-swagger-specification)*
 - [DataDog/integrations-core](https://github.com/DataDog/integrations-core)
   - *[Config models](https://github.com/DataDog/integrations-core/blob/master/docs/developer/meta/config-models.md)*
 - [awslabs/aws-lambda-powertools-python](https://github.com/awslabs/aws-lambda-powertools-python)
-  - *
+  - *Recommended for [advanced-use-cases](https://awslabs.github.io/aws-lambda-powertools-python/2.6.0/utilities/parser/#advanced-use-cases) in the official documentation*
 - [open-metadata/OpenMetadata](https://github.com/open-metadata/OpenMetadata)
   - [Makefile](https://github.com/open-metadata/OpenMetadata/blob/main/Makefile)
 - [airbytehq/airbyte](https://github.com/airbytehq/airbyte)
@@ -304,6 +298,7 @@ These OSS use datamodel-code-generator to generate many models. We can learn abo
 ## Supported output types
 - [pydantic](https://docs.pydantic.dev/).BaseModel
 - [dataclasses.dataclass](https://docs.python.org/3/library/dataclasses.html)
+- [typing.TypedDict](https://docs.python.org/3/library/typing.html#typing.TypedDict)
 
 ## Installation
 
@@ -329,7 +324,7 @@ You can genearte models from a URL.
 ```bash
 $ datamodel-codegen --url https://<INPUT FILE URL> --output model.py
 ```
-This method needs
+This method needs the [http extra option](#http-extra-option)
 
 
 ## All Command Options
@@ -340,7 +335,7 @@ usage: datamodel-codegen [-h] [--input INPUT] [--url URL]
                          [--http-headers HTTP_HEADER [HTTP_HEADER ...]]
                          [--http-ignore-tls]
                          [--input-file-type {auto,openapi,jsonschema,json,yaml,dict,csv}]
-                         [--output-model-type {pydantic.BaseModel,dataclasses.dataclass}]
+                         [--output-model-type {pydantic.BaseModel,dataclasses.dataclass,typing.TypedDict}]
                          [--openapi-scopes {schemas,paths,tags,parameters} [{schemas,paths,tags,parameters} ...]]
                          [--output OUTPUT] [--base-class BASE_CLASS]
                          [--field-constraints] [--use-annotated]
@@ -391,7 +386,7 @@ options:
                         certificate
   --input-file-type {auto,openapi,jsonschema,json,yaml,dict,csv}
                         Input file type (default: auto)
-  --output-model-type {pydantic.BaseModel,dataclasses.dataclass}
+  --output-model-type {pydantic.BaseModel,dataclasses.dataclass,typing.TypedDict}
                         Output model type (default: pydantic.BaseModel)
   --openapi-scopes {schemas,paths,tags,parameters} [{schemas,paths,tags,parameters} ...]
                         Scopes of OpenAPI model generation (default: schemas)
@@ -497,41 +492,6 @@ options:
   --version show version
 ```
 
-
-## Implemented list
-### OpenAPI 3 and JsonSchema
-#### DataType
-- string (include patter/minLength/maxLenght)
-- number (include maximum/exclusiveMaximum/minimum/exclusiveMinimum/multipleOf/le/ge)
-- integer (include maximum/exclusiveMaximum/minimum/exclusiveMinimum/multipleOf/le/ge)
-- boolean
-- array
-- object
-
-##### String Format
-- date
-- datetime
-- time
-- password
-- email
-- idn-email
-- uuid (uuid1/uuid2/uuid3/uuid4/uuid5)
-- ipv4
-- ipv6
-- ipv4-network
-- ipv6-network
-- hostname
-- decimal
-
-#### Other schema
-- enum (as enum.Enum or typing.Literal)
-- allOf (as Multiple inheritance)
-- anyOf (as typing.Union)
-- oneOf (as typing.Union)
-- $ref ([http extra](#http-extra-option) is required when resolving $ref for remote files.)
-- $id (for [JSONSchema](https://json-schema.org/understanding-json-schema/structuring.html#the-id-property))
-
-
 ## Related projects
 ### fastapi-code-generator
 This code generator creates [FastAPI](https://github.com/tiangolo/fastapi) app from an openapi file.
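The headline change above is the new `typing.TypedDict` output type. As a rough illustration only (not taken from the package's tests), this is the shape of module the TypedDict mode is expected to emit for a small schema with one required and one optional property; the schema, field names and exact imports are assumptions, and the backport imports depend on the target-version handling shown later in this diff.

```python
# Hypothetical output of:
#   datamodel-codegen --input api.yaml --output model.py \
#       --output-model-type typing.TypedDict
# For targets older than 3.11 the generator falls back to typing_extensions
# (see TypedDictBackport / NotRequired handling in model/typed_dict.py below).
from typing_extensions import NotRequired, TypedDict


class Person(TypedDict):
    name: str
    age: NotRequired[int]
```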
{datamodel_code_generator-0.19.0 → datamodel_code_generator-0.20.0}/README.md

The README.md changes are the same text changes shown in the README portion of the PKG-INFO diff above (the package long description embeds README.md), applied at the README's own offsets:

@@ -1,6 +1,6 @@   project description now also mentions dataclasses.dataclass and typing.TypedDict
@@ -25,7 +25,7 @@   "## Simple" heading becomes "## Simple Usage"
@@ -235,14 +235,14 @@   "## Projects that use datamodel-code-generator" heading and intro sentence reworded
@@ -260,6 +260,7 @@   typing.TypedDict added to "Supported output types"
@@ -285,7 +286,7 @@   "This method needs the [http extra option](#http-extra-option)"
@@ -296,7 +297,7 @@   usage block: --output-model-type now lists typing.TypedDict
@@ -347,7 +348,7 @@   options help: --output-model-type now lists typing.TypedDict
@@ -453,41 +454,6 @@   the "Implemented list" section removed
{datamodel_code_generator-0.19.0 → datamodel_code_generator-0.20.0}/datamodel_code_generator/__init__.py

@@ -220,6 +220,7 @@ RAW_DATA_TYPES: List[InputFileType] = [
 class DataModelType(Enum):
     PydanticBaseModel = 'pydantic.BaseModel'
     DataclassesDataclass = 'dataclasses.dataclass'
+    TypingTypedDict = 'typing.TypedDict'
 
 
 class OpenAPIScope(Enum):
@@ -314,6 +315,7 @@ def generate(
     capitalise_enum_members: bool = False,
     keep_model_order: bool = False,
     custom_file_header: Optional[str] = None,
+    custom_file_header_path: Optional[Path] = None,
 ) -> None:
     remote_text_cache: DefaultPutDict[str, str] = DefaultPutDict()
     if isinstance(input_, str):
@@ -397,7 +399,7 @@ def generate(
 
     from datamodel_code_generator.model import get_data_model_types
 
-    data_model_types = get_data_model_types(output_model_type)
+    data_model_types = get_data_model_types(output_model_type, target_python_version)
     parser = parser_class(
         source=input_text or input_,
         data_model_type=data_model_types.data_model,
@@ -427,7 +429,9 @@ def generate(
         use_field_description=use_field_description,
         use_default_kwarg=use_default_kwarg,
         reuse_model=reuse_model,
-        enum_field_as_literal=
+        enum_field_as_literal=LiteralType.All
+        if output_model_type == DataModelType.TypingTypedDict
+        else enum_field_as_literal,
         use_one_literal_as_default=use_one_literal_as_default,
         set_default_enum_member=set_default_enum_member,
         use_subclass_enum=use_subclass_enum,
@@ -488,6 +492,9 @@ def generate(
 
     timestamp = datetime.now(timezone.utc).replace(microsecond=0).isoformat()
 
+    if custom_file_header is None and custom_file_header_path:
+        custom_file_header = custom_file_header_path.read_text(encoding=encoding)
+
     header = """\
# generated by datamodel-codegen:
# filename: {}"""
@@ -497,8 +504,7 @@ def generate(
     header += f'\n# version: {get_version()}'
 
     file: Optional[IO[Any]]
-    for path,
-    body, filename = body_and_filename
+    for path, (body, filename) in modules.items():
         if path is None:
             file = None
         else:
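The new `custom_file_header_path` argument and the `TypingTypedDict` member are also reachable from the Python API. A minimal sketch, assuming the remaining `generate()` defaults are acceptable; the input and header file names are placeholders.

```python
from pathlib import Path

from datamodel_code_generator import DataModelType, InputFileType, generate

# The header text is read from a file (custom_file_header_path.read_text(...))
# only when custom_file_header itself is not given, as in the branch above.
generate(
    Path('api.yaml'),                            # placeholder input file
    input_file_type=InputFileType.OpenAPI,
    output=Path('model.py'),
    output_model_type=DataModelType.TypingTypedDict,
    custom_file_header_path=Path('header.txt'),  # placeholder header file
)
```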
{datamodel_code_generator-0.19.0 → datamodel_code_generator-0.20.0}/datamodel_code_generator/__main__.py

@@ -433,6 +433,15 @@ arg_parser.add_argument(
 arg_parser.add_argument(
     '--custom-file-header', help='Custom file header', type=str, default=None
 )
+
+arg_parser.add_argument(
+    '--custom-file-header-path',
+    help='Custom file header file path',
+    default=None,
+    type=str,
+)
+
+
 arg_parser.add_argument('--version', help='show version', action='store_true')
 
 
@@ -448,7 +457,9 @@ class Config(BaseModel):
             return value
         return cast(TextIOBase, Path(value).expanduser().resolve().open('rt'))
 
-    @validator(
+    @validator(
+        'input', 'output', 'custom_template_dir', 'custom_file_header_path', pre=True
+    )
     def validate_path(cls, value: Any) -> Optional[Path]:
         if value is None or isinstance(value, Path):
             return value  # pragma: no cover
@@ -488,6 +499,14 @@ class Config(BaseModel):
         )
         return values
 
+    @root_validator
+    def validate_custom_file_header(cls, values: Dict[str, Any]) -> Dict[str, Any]:
+        if values.get('custom_file_header') and values.get('custom_file_header_path'):
+            raise Error(
+                '`--custom_file_header_path` can not be used with `--custom_file_header`.'
+            )  # pragma: no cover
+        return values
+
     # Pydantic 1.5.1 doesn't support each_item=True correctly
     @validator('http_headers', pre=True)
     def validate_http_headers(cls, value: Any) -> Optional[List[Tuple[str, str]]]:
@@ -583,6 +602,7 @@ class Config(BaseModel):
     capitalise_enum_members: bool = False
     keep_model_order: bool = False
     custom_file_header: Optional[str] = None
+    custom_file_header_path: Optional[Path] = None
 
     def merge_args(self, args: Namespace) -> None:
         set_args = {
@@ -742,6 +762,8 @@ def main(args: Optional[Sequence[str]] = None) -> Exit:
             remove_special_field_name_prefix=config.remove_special_field_name_prefix,
             capitalise_enum_members=config.capitalise_enum_members,
             keep_model_order=config.keep_model_order,
+            custom_file_header=config.custom_file_header,
+            custom_file_header_path=config.custom_file_header_path,
         )
         return Exit.OK
     except InvalidClassNameError as e:
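The same option is exposed on the command line. A sketch of driving it through `main()`, which accepts an argument sequence as its signature above shows; the file names are placeholders, and `--custom-file-header-path` cannot be combined with `--custom-file-header` because of the new `root_validator`.

```python
from datamodel_code_generator.__main__ import main

exit_code = main(
    [
        '--input', 'api.yaml',                      # placeholder input file
        '--output', 'model.py',
        '--custom-file-header-path', 'header.txt',  # placeholder header file
    ]
)
print(exit_code)  # Exit.OK on success
```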
{datamodel_code_generator-0.19.0 → datamodel_code_generator-0.20.0}/datamodel_code_generator/format.py

@@ -9,6 +9,8 @@ import black
 import isort
 import toml
 
+from datamodel_code_generator import cached_property
+
 
 class PythonVersion(Enum):
     PY_36 = '3.6'
@@ -18,17 +20,41 @@ class PythonVersion(Enum):
     PY_310 = '3.10'
     PY_311 = '3.11'
 
+    @cached_property
+    def _is_py_38_or_later(self) -> bool:  # pragma: no cover
+        return self.value not in {self.PY_36.value, self.PY_37.value}  # type: ignore
+
+    @cached_property
+    def _is_py_39_or_later(self) -> bool:  # pragma: no cover
+        return self.value not in {self.PY_36.value, self.PY_37.value, self.PY_38.value}  # type: ignore
+
+    @cached_property
+    def _is_py_310_or_later(self) -> bool:  # pragma: no cover
+        return self.value not in {self.PY_36.value, self.PY_37.value, self.PY_38.value, self.PY_39.value}  # type: ignore
+
+    @cached_property
+    def _is_py_311_or_later(self) -> bool:  # pragma: no cover
+        return self.value not in {self.PY_36.value, self.PY_37.value, self.PY_38.value, self.PY_39.value, self.PY_310.value}  # type: ignore
+
     @property
     def has_literal_type(self) -> bool:
-        return self.
+        return self._is_py_38_or_later
 
     @property
     def has_union_operator(self) -> bool:  # pragma: no cover
-        return self.
+        return self._is_py_310_or_later
 
     @property
     def has_annotated_type(self) -> bool:
-        return self.
+        return self._is_py_39_or_later
+
+    @property
+    def has_typed_dict(self) -> bool:
+        return self._is_py_38_or_later
+
+    @property
+    def has_typed_dict_non_required(self) -> bool:
+        return self._is_py_311_or_later
 
 
 if TYPE_CHECKING:
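The added properties make the version gates explicit; the following checks follow directly from the set logic above (TypedDict needs a 3.8+ target, NotRequired needs 3.11+).

```python
from datamodel_code_generator.format import PythonVersion

assert PythonVersion.PY_37.has_typed_dict is False
assert PythonVersion.PY_38.has_typed_dict is True
assert PythonVersion.PY_310.has_typed_dict_non_required is False
assert PythonVersion.PY_311.has_typed_dict_non_required is True
```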
{datamodel_code_generator-0.19.0 → datamodel_code_generator-0.20.0}/datamodel_code_generator/model/__init__.py

@@ -6,7 +6,7 @@ from ..types import DataTypeManager as DataTypeManagerABC
 from .base import ConstraintsBase, DataModel, DataModelFieldBase
 
 if TYPE_CHECKING:
-    from .. import DataModelType
+    from .. import DataModelType, PythonVersion
 
 
 class DataModelSet(NamedTuple):
@@ -17,9 +17,11 @@ class DataModelSet(NamedTuple):
     dump_resolve_reference_action: Optional[Callable[[Iterable[str]], str]]
 
 
-def get_data_model_types(
+def get_data_model_types(
+    data_model_type: DataModelType, target_python_version: PythonVersion
+) -> DataModelSet:
     from .. import DataModelType
-    from . import dataclass, pydantic, rootmodel
+    from . import dataclass, pydantic, rootmodel, typed_dict
     from .types import DataTypeManager
 
     if data_model_type == DataModelType.PydanticBaseModel:
@@ -38,6 +40,18 @@ def get_data_model_types(data_model_type: DataModelType) -> DataModelSet:
             data_type_manager=DataTypeManager,
             dump_resolve_reference_action=None,
         )
+    elif data_model_type == DataModelType.TypingTypedDict:
+        return DataModelSet(
+            data_model=typed_dict.TypedDict
+            if target_python_version.has_typed_dict
+            else typed_dict.TypedDictBackport,
+            root_model=rootmodel.RootModel,
+            field_model=typed_dict.DataModelField
+            if target_python_version.has_typed_dict_non_required
+            else typed_dict.DataModelFieldBackport,
+            data_type_manager=DataTypeManager,
+            dump_resolve_reference_action=None,
+        )
     raise ValueError(
         f'{data_model_type} is unsupported data model type'
     )  # pragma: no cover
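Because the target Python version now participates in the selection, older targets transparently receive the `typing_extensions` backport classes. A small sketch, assuming `PythonVersion` is importable from the package root as the TYPE_CHECKING import above suggests:

```python
from datamodel_code_generator import DataModelType, PythonVersion
from datamodel_code_generator.model import get_data_model_types

# Python 3.7 target -> typing_extensions backports for TypedDict / NotRequired.
py37 = get_data_model_types(DataModelType.TypingTypedDict, PythonVersion.PY_37)
print(py37.data_model, py37.field_model)    # TypedDictBackport, DataModelFieldBackport

# Python 3.11 target -> standard-library typing.TypedDict / NotRequired.
py311 = get_data_model_types(DataModelType.TypingTypedDict, PythonVersion.PY_311)
print(py311.data_model, py311.field_model)  # TypedDict, DataModelField
```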
datamodel_code_generator-0.20.0/datamodel_code_generator/model/improts.py
ADDED

@@ -0,0 +1,8 @@
+from datamodel_code_generator.imports import Import
+
+IMPORT_DATACLASS = Import.from_full_path('dataclasses.dataclass')
+IMPORT_FIELD = Import.from_full_path('dataclasses.field')
+IMPORT_TYPED_DICT = Import.from_full_path('typing.TypedDict')
+IMPORT_TYPED_DICT_BACKPORT = Import.from_full_path('typing_extensions.TypedDict')
+IMPORT_NOT_REQUIRED = Import.from_full_path('typing.NotRequired')
+IMPORT_NOT_REQUIRED_BACKPORT = Import.from_full_path('typing_extensions.NotRequired')
datamodel_code_generator-0.20.0/datamodel_code_generator/model/template/TypedDictClass.jinja2
ADDED

@@ -0,0 +1,17 @@
+class {{ class_name }}({{ base_class }}):
+{%- if description %}
+    """
+    {{ description | indent(4) }}
+    """
+{%- endif %}
+{%- if not fields and not description %}
+    pass
+{%- endif %}
+{%- for field in fields %}
+    {{ field.name }}: {{ field.type_hint }}
+{%- if field.docstring %}
+    """
+    {{ field.docstring | indent(4) }}
+    """
+{%- endif %}
+{%- endfor -%}
datamodel_code_generator-0.20.0/datamodel_code_generator/model/template/TypedDictFunction.jinja2
ADDED

@@ -0,0 +1,16 @@
+{%- if description %}
+"""
+{{ description | indent(4) }}
+"""
+{%- endif %}
+{{ class_name }} = TypedDict('{{ class_name }}', {
+{%- for field in all_fields %}
+    '{{ field.key }}': {{ field.type_hint }},
+{%- if field.docstring %}
+    """
+    {{ field.docstring | indent(4) }}
+    """
+{%- endif %}
+{%- endfor -%}
+})
+
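Between them the two templates cover both TypedDict spellings: the class template is used when every key is a valid identifier, the functional template otherwise (see `is_functional_syntax` in model/typed_dict.py below). A hypothetical rendering of each, with invented field names:

```python
from typing_extensions import NotRequired, TypedDict


# TypedDictClass.jinja2 style: all keys are valid Python identifiers.
class Pet(TypedDict):
    name: str
    tag: NotRequired[str]


# TypedDictFunction.jinja2 style: at least one key (here 'content-type')
# is not a valid identifier, so the functional syntax is required.
HttpHeaders = TypedDict('HttpHeaders', {
    'content-type': str,
    'x-request-id': NotRequired[str],
})
```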
datamodel_code_generator-0.20.0/datamodel_code_generator/model/typed_dict.py
ADDED

@@ -0,0 +1,151 @@
+from __future__ import annotations
+
+import keyword
+from pathlib import Path
+from typing import (
+    Any,
+    ClassVar,
+    DefaultDict,
+    Dict,
+    Iterator,
+    List,
+    Optional,
+    Tuple,
+)
+
+from datamodel_code_generator.imports import Import
+from datamodel_code_generator.model import DataModel, DataModelFieldBase
+from datamodel_code_generator.model.base import UNDEFINED
+from datamodel_code_generator.model.improts import (
+    IMPORT_NOT_REQUIRED,
+    IMPORT_NOT_REQUIRED_BACKPORT,
+    IMPORT_TYPED_DICT,
+    IMPORT_TYPED_DICT_BACKPORT,
+)
+from datamodel_code_generator.reference import Reference
+from datamodel_code_generator.types import NOT_REQUIRED_PREFIX
+
+escape_characters = str.maketrans(
+    {
+        '\\': r'\\',
+        "'": r"\'",
+        '\b': r'\b',
+        '\f': r'\f',
+        '\n': r'\n',
+        '\r': r'\r',
+        '\t': r'\t',
+    }
+)
+
+
+def _is_valid_field_name(field: DataModelFieldBase) -> bool:
+    name = field.original_name or field.name
+    if name is None:  # pragma: no cover
+        return False
+    return name.isidentifier() and not keyword.iskeyword(name)
+
+
+class TypedDict(DataModel):
+    TEMPLATE_FILE_PATH: ClassVar[str] = 'TypedDict.jinja2'
+    BASE_CLASS: ClassVar[str] = 'typing.TypedDict'
+    DEFAULT_IMPORTS: ClassVar[Tuple[Import, ...]] = (IMPORT_TYPED_DICT,)
+
+    def __init__(
+        self,
+        *,
+        reference: Reference,
+        fields: List[DataModelFieldBase],
+        decorators: Optional[List[str]] = None,
+        base_classes: Optional[List[Reference]] = None,
+        custom_base_class: Optional[str] = None,
+        custom_template_dir: Optional[Path] = None,
+        extra_template_data: Optional[DefaultDict[str, Dict[str, Any]]] = None,
+        methods: Optional[List[str]] = None,
+        path: Optional[Path] = None,
+        description: Optional[str] = None,
+        default: Any = UNDEFINED,
+        nullable: bool = False,
+    ) -> None:
+        super().__init__(
+            reference=reference,
+            fields=fields,
+            decorators=decorators,
+            base_classes=base_classes,
+            custom_base_class=custom_base_class,
+            custom_template_dir=custom_template_dir,
+            extra_template_data=extra_template_data,
+            methods=methods,
+            path=path,
+            description=description,
+            default=default,
+            nullable=nullable,
+        )
+
+    @property
+    def is_functional_syntax(self) -> bool:
+        return any(not _is_valid_field_name(f) for f in self.fields)
+
+    @property
+    def all_fields(self) -> Iterator[DataModelFieldBase]:
+        for base_class in self.base_classes:
+            if base_class.reference is None:  # pragma: no cover
+                continue
+            data_model = base_class.reference.source
+            if not isinstance(data_model, DataModel):  # pragma: no cover
+                continue
+
+            if isinstance(data_model, TypedDict):  # pragma: no cover
+                yield from data_model.all_fields
+
+        yield from self.fields
+
+    def render(self, *, class_name: Optional[str] = None) -> str:
+        response = self._render(
+            class_name=class_name or self.class_name,
+            fields=self.fields,
+            decorators=self.decorators,
+            base_class=self.base_class,
+            methods=self.methods,
+            description=self.description,
+            is_functional_syntax=self.is_functional_syntax,
+            all_fields=self.all_fields,
+            **self.extra_template_data,
+        )
+        return response
+
+
+class TypedDictBackport(TypedDict):
+    BASE_CLASS: ClassVar[str] = 'typing_extensions.TypedDict'
+    DEFAULT_IMPORTS: ClassVar[Tuple[Import, ...]] = (IMPORT_TYPED_DICT_BACKPORT,)
+
+
+class DataModelField(DataModelFieldBase):
+    DEFAULT_IMPORTS: ClassVar[Tuple[Import, ...]] = (IMPORT_NOT_REQUIRED,)
+
+    @property
+    def key(self) -> str:
+        return (self.original_name or self.name or '').translate(  # pragma: no cover
+            escape_characters
+        )
+
+    @property
+    def type_hint(self) -> str:
+        type_hint = super().type_hint
+        if self._not_required:
+            return f'{NOT_REQUIRED_PREFIX}{type_hint}]'
+        return type_hint
+
+    @property
+    def _not_required(self) -> bool:
+        return not self.required and isinstance(self.parent, TypedDict)
+
+    @property
+    def imports(self) -> Tuple[Import, ...]:
+        return (
+            *super().imports,
+            *(self.DEFAULT_IMPORTS if self._not_required else ()),
+        )
+
+
+class DataModelFieldBackport(DataModelField):
+    DEFAULT_IMPORTS: ClassVar[Tuple[Import, ...]] = (IMPORT_NOT_REQUIRED_BACKPORT,)
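The switch between the two templates hinges on `_is_valid_field_name`, and optional keys are wrapped with `NOT_REQUIRED_PREFIX` (imported above; presumably the string `'NotRequired['`). The identifier check itself is plain standard library and can be reproduced directly:

```python
import keyword


def is_valid_field_name(name: str) -> bool:
    # Mirrors _is_valid_field_name above: only plain identifiers that are not
    # Python keywords can be emitted with the class-based TypedDict syntax.
    return name.isidentifier() and not keyword.iskeyword(name)


assert is_valid_field_name('name')
assert not is_valid_field_name('content-type')  # hyphen  -> functional syntax
assert not is_valid_field_name('class')         # keyword -> functional syntax
```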
{datamodel_code_generator-0.19.0 → datamodel_code_generator-0.20.0}/datamodel_code_generator/parser/base.py

@@ -485,12 +485,7 @@ class Parser(ABC):
             yield Source(path=Path(), text=self.source)
         elif isinstance(self.source, Path):  # pragma: no cover
             if self.source.is_dir():
-                paths = (
-                    sorted(self.source.rglob('*'))
-                    if self.keep_model_order
-                    else self.source.rglob('*')
-                )
-                for path in paths:
+                for path in sorted(self.source.rglob('*'), key=lambda p: p.name):
                     if path.is_file():
                         yield Source.from_path(path, self.base_path, self.encoding)
             else:
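The simplification above always sorts directory input by file name, so model ordering is deterministic regardless of `keep_model_order`. The traversal is equivalent to this standalone sketch (the directory name is a placeholder):

```python
from pathlib import Path

source = Path('schemas')  # placeholder schema directory
# Sorted by file name only (not full path), matching key=lambda p: p.name above.
for path in sorted(source.rglob('*'), key=lambda p: p.name):
    if path.is_file():
        print(path)
```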
{datamodel_code_generator-0.19.0 → datamodel_code_generator-0.20.0}/datamodel_code_generator/parser/jsonschema.py

@@ -226,6 +226,7 @@ class JsonSchemaObject(BaseModel):
     default: Any
     id: Optional[str] = Field(default=None, alias='$id')
     custom_type_path: Optional[str] = Field(default=None, alias='customTypePath')
+    custom_base_path: Optional[str] = Field(default=None, alias='customBasePath')
     extras: Dict[str, Any] = Field(alias=__extra_key__, default_factory=dict)
     discriminator: Union[Discriminator, str, None]
 
@@ -663,7 +664,7 @@ class JsonSchemaParser(Parser):
             reference=reference,
             fields=fields,
             base_classes=base_classes,
-            custom_base_class=self.base_class,
+            custom_base_class=obj.custom_base_path or self.base_class,
             custom_template_dir=self.custom_template_dir,
             extra_template_data=self.extra_template_data,
             path=self.current_source_path,
@@ -781,7 +782,7 @@ class JsonSchemaParser(Parser):
         data_model_root = self.data_model_root_type(
             reference=reference,
             fields=[field],
-            custom_base_class=self.base_class,
+            custom_base_class=obj.custom_base_path or self.base_class,
             custom_template_dir=self.custom_template_dir,
             extra_template_data=self.extra_template_data,
             path=self.current_source_path,
@@ -903,7 +904,7 @@ class JsonSchemaParser(Parser):
         data_model_type = data_model_type_class(
             reference=reference,
             fields=fields,
-            custom_base_class=self.base_class,
+            custom_base_class=obj.custom_base_path or self.base_class,
             custom_template_dir=self.custom_template_dir,
             extra_template_data=self.extra_template_data,
             path=self.current_source_path,
@@ -1153,7 +1154,7 @@ class JsonSchemaParser(Parser):
         data_model_root = self.data_model_root_type(
             reference=reference,
             fields=[field],
-            custom_base_class=self.base_class,
+            custom_base_class=obj.custom_base_path or self.base_class,
             custom_template_dir=self.custom_template_dir,
             extra_template_data=self.extra_template_data,
             path=self.current_source_path,
@@ -1233,7 +1234,7 @@ class JsonSchemaParser(Parser):
                     has_default=obj.has_default,
                 )
             ],
-            custom_base_class=self.base_class,
+            custom_base_class=obj.custom_base_path or self.base_class,
             custom_template_dir=self.custom_template_dir,
             extra_template_data=self.extra_template_data,
             path=self.current_source_path,
@@ -1366,7 +1367,7 @@ class JsonSchemaParser(Parser):
                     original_name=None,
                 )
             ],
-            custom_base_class=self.base_class,
+            custom_base_class=obj.custom_base_path or self.base_class,
             custom_template_dir=self.custom_template_dir,
             extra_template_data=self.extra_template_data,
             path=self.current_source_path,
@@ -1623,6 +1624,8 @@ class JsonSchemaParser(Parser):
             path, obj_name, unique=False, class_name=True
         ).name
         with self.root_id_context(raw):
+            # Some jsonschema docs include attribute self to have include version details
+            raw.pop('self', None)
             # parse $id before parsing $ref
             root_obj = JsonSchemaObject.parse_obj(raw)
             self.parse_id(root_obj, path_parts)
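`customBasePath` parallels the existing `customTypePath` vendor extension: when present on a schema object it overrides the global `--base-class` for that model. A hypothetical schema using it; the keyword name comes from the alias above, while the dotted class path is a placeholder.

```python
schema = {
    'type': 'object',
    'customBasePath': 'myapp.models.CustomBase',  # placeholder base class path
    'properties': {
        'name': {'type': 'string'},
    },
    'required': ['name'],
}
# With the change above, the model generated for this schema would subclass
# myapp.models.CustomBase instead of the --base-class default.
```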
{datamodel_code_generator-0.19.0 → datamodel_code_generator-0.20.0}/datamodel_code_generator/parser/openapi.py

@@ -555,6 +555,9 @@ class OpenAPIParser(JsonSchemaParser):
         ]
         paths_path = [*path_parts, '#/paths']
         for path_name, methods in paths.items():
+            # Resolve path items if applicable
+            if '$ref' in methods:
+                methods = self.get_ref_model(methods['$ref'])
             paths_parameters = parameters[:]
             if 'parameters' in methods:
                 paths_parameters.extend(methods['parameters'])
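The new branch resolves path items that are themselves `$ref`s before collecting parameters and operations. A sketch of the kind of spec fragment this now handles; the reference target is a placeholder.

```python
openapi_fragment = {
    'paths': {
        # The whole path item is a $ref; get_ref_model() now resolves it
        # before 'parameters' and the HTTP methods are read from it.
        '/pets': {'$ref': '#/components/pathItems/ListPets'},
    },
}
```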
datamodel_code_generator-0.20.0/datamodel_code_generator/version.py
ADDED

@@ -0,0 +1 @@
+version: str = '0.20.0'
{datamodel_code_generator-0.19.0 → datamodel_code_generator-0.20.0}/pyproject.toml

@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "datamodel-code-generator"
-version = "0.19.0"
+version = "0.20.0"
 description = "Datamodel Code Generator"
 authors = ["Koudai Aono <koxudaxi@gmail.com>"]
 readme = "README.md"
@@ -48,7 +48,7 @@ pydantic = [
 ]
 argcomplete = ">=1.10,<4.0"
 prance = ">=0.18.2,<1.0"
-openapi-spec-validator = ">=0.2.8,<=0.5.
+openapi-spec-validator = ">=0.2.8,<=0.5.2"
 jinja2 = ">=2.10.1,<4.0"
 inflect = ">=4.1.0,<6.0"
 black = ">=19.10b0"
@@ -64,7 +64,7 @@ pytest = ">6.0"
 pytest-benchmark = "*"
 pytest-cov = ">=2.12.1"
 pytest-mock = "*"
-mypy = "
+mypy = ">=1.0.1,<1.4.0"
 black = "^23.3.0"
 freezegun = "*"
 types-Jinja2 = "*"
@@ -83,11 +83,9 @@ line-length = 88
 extend-select = ['Q', 'RUF100', 'C4', 'UP', 'I']
 flake8-quotes = {inline-quotes = 'single', multiline-quotes = 'double'}
 target-version = 'py37'
-ignore = ['E501']
+ignore = ['E501', 'UP006', 'UP007']
 extend-exclude = ['tests/data']
 
-[tool.ruff.pyupgrade]
-keep-runtime-typing = true
 
 [tool.black]
 line-length = 88
datamodel_code_generator-0.19.0/datamodel_code_generator/version.py
DELETED

@@ -1 +0,0 @@
-version: str = '0.19.0'