datamodel-code-generator 0.26.5__py3-none-any.whl → 0.27.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of datamodel-code-generator might be problematic.
- datamodel_code_generator/__init__.py +11 -3
- datamodel_code_generator/__main__.py +14 -12
- datamodel_code_generator/http.py +2 -1
- datamodel_code_generator/imports.py +1 -1
- datamodel_code_generator/model/__init__.py +3 -1
- datamodel_code_generator/model/base.py +7 -5
- datamodel_code_generator/model/enum.py +1 -1
- datamodel_code_generator/model/pydantic/base_model.py +1 -1
- datamodel_code_generator/model/pydantic/types.py +1 -1
- datamodel_code_generator/model/pydantic_v2/base_model.py +2 -2
- datamodel_code_generator/model/pydantic_v2/types.py +4 -1
- datamodel_code_generator/parser/base.py +17 -7
- datamodel_code_generator/parser/graphql.py +4 -4
- datamodel_code_generator/parser/jsonschema.py +5 -4
- datamodel_code_generator/parser/openapi.py +7 -5
- datamodel_code_generator/pydantic_patch.py +1 -1
- datamodel_code_generator/reference.py +5 -5
- datamodel_code_generator/types.py +14 -12
- datamodel_code_generator/util.py +7 -11
- {datamodel_code_generator-0.26.5.dist-info → datamodel_code_generator-0.27.1.dist-info}/METADATA +31 -28
- {datamodel_code_generator-0.26.5.dist-info → datamodel_code_generator-0.27.1.dist-info}/RECORD +31 -32
- {datamodel_code_generator-0.26.5.dist-info → datamodel_code_generator-0.27.1.dist-info}/WHEEL +1 -1
- datamodel_code_generator-0.27.1.dist-info/entry_points.txt +2 -0
- datamodel_code_generator/version.py +0 -1
- datamodel_code_generator-0.26.5.dist-info/entry_points.txt +0 -3
- {datamodel_code_generator-0.26.5.dist-info → datamodel_code_generator-0.27.1.dist-info/licenses}/LICENSE +0 -0
@@ -81,6 +81,9 @@ def enable_debug_message() -> None: # pragma: no cover
     pysnooper.tracer.DISABLED = False


+DEFAULT_MAX_VARIABLE_LENGTH: int = 100
+
+
 def snooper_to_methods(  # type: ignore
     output=None,
     watch=(),

@@ -90,7 +93,7 @@ def snooper_to_methods( # type: ignore
     overwrite=False,
     thread_info=False,
     custom_repr=(),
-    max_variable_length=
+    max_variable_length: Optional[int] = DEFAULT_MAX_VARIABLE_LENGTH,
 ) -> Callable[..., Any]:
     def inner(cls: Type[T]) -> Type[T]:
         if not pysnooper:

@@ -108,7 +111,9 @@ def snooper_to_methods( # type: ignore
                 overwrite,
                 thread_info,
                 custom_repr,
-                max_variable_length
+                max_variable_length
+                if max_variable_length is not None
+                else DEFAULT_MAX_VARIABLE_LENGTH,
             )(method)
             setattr(cls, name, snooper_method)
         return cls
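The change above introduces a module-level default for pysnooper's variable truncation and treats `None` as "use the default". A minimal, self-contained sketch of the same defaulting pattern, independent of pysnooper (the decorator name below is illustrative, not the library's API):

```python
from typing import Any, Callable, Optional

DEFAULT_MAX_VARIABLE_LENGTH: int = 100


def snoop_like(max_variable_length: Optional[int] = DEFAULT_MAX_VARIABLE_LENGTH) -> Callable[..., Any]:
    # None is treated as "fall back to the default", mirroring the diff above.
    effective_length = (
        max_variable_length if max_variable_length is not None else DEFAULT_MAX_VARIABLE_LENGTH
    )

    def decorator(func: Callable[..., Any]) -> Callable[..., Any]:
        def wrapper(*args: Any, **kwargs: Any) -> Any:
            for value in args:
                print(repr(value)[:effective_length])  # truncate long reprs
            return func(*args, **kwargs)

        return wrapper

    return decorator


@snoop_like(max_variable_length=None)  # falls back to 100
def add(a: int, b: int) -> int:
    return a + b


print(add(1, 2))
```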
@@ -424,8 +429,10 @@ def generate(
     data_model_types = get_data_model_types(
         output_model_type, target_python_version, output_datetime_class
     )
+    source = input_text or input_
+    assert not isinstance(source, Mapping)
     parser = parser_class(
-        source=
+        source=source,
         data_model_type=data_model_types.data_model,
         data_model_root_type=data_model_types.root_model,
         data_model_field_type=data_model_types.field_model,

@@ -514,6 +521,7 @@ def generate(
         # input_ might be a dict object provided directly, and missing a name field
         input_filename = getattr(input_, 'name', '<dict>')
     else:
+        assert isinstance(input_, Path)
         input_filename = input_.name
     if not results:
         raise Error('Models not found in the input data')
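Both asserts above narrow a union-typed `input_`/`source` value so static checkers (and readers) know which branch handles which shape. A small sketch of the same narrowing idea, under assumed types that only loosely mirror `generate()`'s signature:

```python
from pathlib import Path
from typing import Mapping, Union

InputType = Union[Path, str, Mapping[str, object]]


def input_filename(input_: InputType) -> str:
    if isinstance(input_, Mapping):
        # Mappings carry no filename; mirror the '<dict>' fallback above.
        return getattr(input_, 'name', '<dict>')
    assert isinstance(input_, (Path, str))  # narrows the union for type checkers
    return input_.name if isinstance(input_, Path) else '<string>'


print(input_filename(Path('schema.json')))
print(input_filename({'openapi': '3.0.0'}))
```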
@@ -85,7 +85,7 @@ signal.signal(signal.SIGINT, sig_int_handler)

 class Config(BaseModel):
     if PYDANTIC_V2:
-        model_config = ConfigDict(arbitrary_types_allowed=True)
+        model_config = ConfigDict(arbitrary_types_allowed=True)  # pyright: ignore [reportAssignmentType]

     def get(self, item: str) -> Any:
         return getattr(self, item)

@@ -185,8 +185,8 @@ class Config(BaseModel):

     @model_validator(mode='after')
     def validate_keyword_only(cls, values: Dict[str, Any]) -> Dict[str, Any]:
-        output_model_type: DataModelType = values.get('output_model_type')
-        python_target: PythonVersion = values.get('target_python_version')
+        output_model_type: DataModelType = values.get('output_model_type')  # pyright: ignore [reportAssignmentType]
+        python_target: PythonVersion = values.get('target_python_version')  # pyright: ignore [reportAssignmentType]
         if (
             values.get('keyword_only')
             and output_model_type == DataModelType.DataclassesDataclass
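A large share of the changes in this release add rule-scoped `# pyright: ignore [...]` comments like the ones above, rather than blanket `# type: ignore` markers. A short sketch of the pattern (which exact rule pyright reports can depend on its version and configuration, so treat the rule name as mirroring the diff rather than as a guarantee):

```python
from typing import Dict

values: Dict[str, str] = {'target_python_version': '3.12'}

# .get() returns Optional[str]; assigning that to a plain `str` is what the
# checker objects to.  The scoped comment silences only that one rule on
# that one line, keeping the rest of the file fully checked.
python_target: str = values.get('target_python_version', '3.8')  # no ignore needed: default removes None
maybe_target: str = values.get('target_python_version')  # pyright: ignore [reportAssignmentType]
print(python_target, maybe_target)
```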
@@ -219,7 +219,7 @@ class Config(BaseModel):
         def validate_each_item(each_item: Any) -> Tuple[str, str]:
             if isinstance(each_item, str):  # pragma: no cover
                 try:
-                    field_name, field_value = each_item.split(':', maxsplit=1)
+                    field_name, field_value = each_item.split(':', maxsplit=1)
                     return field_name, field_value.lstrip()
                 except ValueError:
                     raise Error(f'Invalid http header: {each_item!r}')

@@ -236,7 +236,7 @@ class Config(BaseModel):
         def validate_each_item(each_item: Any) -> Tuple[str, str]:
             if isinstance(each_item, str):  # pragma: no cover
                 try:
-                    field_name, field_value = each_item.split('=', maxsplit=1)
+                    field_name, field_value = each_item.split('=', maxsplit=1)
                     return field_name, field_value.lstrip()
                 except ValueError:
                     raise Error(f'Invalid http query parameter: {each_item!r}')
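The two validators above parse `--http-headers` and `--http-query-parameters` values by splitting on the first `:` or `=` only. A standalone sketch of that parsing (the project's `Error` class is replaced by `ValueError` here):

```python
from typing import Tuple


def parse_header(each_item: str) -> Tuple[str, str]:
    try:
        field_name, field_value = each_item.split(':', maxsplit=1)
        return field_name, field_value.lstrip()
    except ValueError:
        raise ValueError(f'Invalid http header: {each_item!r}')


# maxsplit=1 keeps any further colons inside the value intact.
print(parse_header('Authorization: Bearer abc:def'))  # ('Authorization', 'Bearer abc:def')
```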
@@ -248,14 +248,16 @@ class Config(BaseModel):

     @model_validator(mode='before')
     def validate_additional_imports(cls, values: Dict[str, Any]) -> Dict[str, Any]:
-
-
+        additional_imports = values.get('additional_imports')
+        if additional_imports is not None:
+            values['additional_imports'] = additional_imports.split(',')
         return values

     @model_validator(mode='before')
     def validate_custom_formatters(cls, values: Dict[str, Any]) -> Dict[str, Any]:
-
-
+        custom_formatters = values.get('custom_formatters')
+        if custom_formatters is not None:
+            values['custom_formatters'] = custom_formatters.split(',')
         return values

     if PYDANTIC_V2:
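Both rewritten "before" validators now fetch the raw option, split it on commas only when it is present, and leave absent options alone. A self-contained sketch of that behaviour:

```python
from typing import Any, Dict


def split_csv_option(values: Dict[str, Any], key: str) -> Dict[str, Any]:
    # Mirrors the validators above: a comma-separated CLI string becomes a
    # list, and a missing option is left untouched.
    raw = values.get(key)
    if raw is not None:
        values[key] = raw.split(',')
    return values


print(split_csv_option({'additional_imports': 'collections.deque,uuid.UUID'}, 'additional_imports'))
# {'additional_imports': ['collections.deque', 'uuid.UUID']}
print(split_csv_option({}, 'additional_imports'))
# {}
```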
@@ -282,7 +284,7 @@ class Config(BaseModel):
     disable_warnings: bool = False
     target_python_version: PythonVersion = PythonVersion.PY_38
     base_class: str = ''
-    additional_imports: Optional[List[str]] =
+    additional_imports: Optional[List[str]] = None
     custom_template_dir: Optional[Path] = None
     extra_template_data: Optional[TextIOBase] = None
     validation: bool = False

@@ -397,9 +399,9 @@ def main(args: Optional[Sequence[str]] = None) -> Exit:
     arg_parser.parse_args(args, namespace=namespace)

     if namespace.version:
-        from datamodel_code_generator
+        from datamodel_code_generator import get_version

-        print(
+        print(get_version())
         exit(0)

     pyproject_config = _get_pyproject_toml_config(Path().resolve())
datamodel_code_generator/http.py  CHANGED

@@ -28,12 +28,14 @@ class DataModelSet(NamedTuple):
 def get_data_model_types(
     data_model_type: DataModelType,
     target_python_version: PythonVersion = DEFAULT_TARGET_PYTHON_VERSION,
-    target_datetime_class: DatetimeClassType =
+    target_datetime_class: Optional[DatetimeClassType] = None,
 ) -> DataModelSet:
     from .. import DataModelType
     from . import dataclass, msgspec, pydantic, pydantic_v2, rootmodel, typed_dict
     from .types import DataTypeManager

+    if target_datetime_class is None:
+        target_datetime_class = DEFAULT_TARGET_DATETIME_CLASS
     if data_model_type == DataModelType.PydanticBaseModel:
         return DataModelSet(
             data_model=pydantic.BaseModel,
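`get_data_model_types` now accepts `None` and resolves it to the default inside the function body, instead of baking the default into the signature. A minimal sketch of why that helps: callers can forward "no preference" explicitly, and the public default stays in one place (the enum below is an illustrative stand-in for `DatetimeClassType`, not the project's definition):

```python
from enum import Enum
from typing import Optional


class DatetimeClass(Enum):  # illustrative stand-in
    Datetime = 'datetime'
    Awaredatetime = 'AwareDatetime'


DEFAULT_TARGET_DATETIME_CLASS = DatetimeClass.Datetime


def pick(target_datetime_class: Optional[DatetimeClass] = None) -> DatetimeClass:
    # Resolve None at call time so the fallback lives in exactly one place.
    if target_datetime_class is None:
        target_datetime_class = DEFAULT_TARGET_DATETIME_CLASS
    return target_datetime_class


print(pick())                              # DatetimeClass.Datetime
print(pick(DatetimeClass.Awaredatetime))   # DatetimeClass.Awaredatetime
```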
@@ -53,7 +53,7 @@ class ConstraintsBase(_BaseModel):
     unique_items: Optional[bool] = Field(None, alias='uniqueItems')
     _exclude_fields: ClassVar[Set[str]] = {'has_constraints'}
     if PYDANTIC_V2:
-        model_config = ConfigDict(
+        model_config = ConfigDict(  # pyright: ignore [reportAssignmentType]
             arbitrary_types_allowed=True, ignored_types=(cached_property,)
         )
     else:

@@ -87,7 +87,9 @@ class ConstraintsBase(_BaseModel):
         else:
             model_field_constraints = {}

-        if
+        if constraints_class is None or not issubclass(
+            constraints_class, ConstraintsBase
+        ):  # pragma: no cover
             return None

         return constraints_class.parse_obj(
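The new guard bails out unless a real `ConstraintsBase` subclass was supplied, so the later `parse_obj` call can never run against `None` or an unrelated class. A small sketch of that defensive check (class and function names below are illustrative):

```python
from typing import Optional, Type


class ConstraintsBase:
    pass


class FieldConstraints(ConstraintsBase):
    pass


def build(constraints_class: Optional[Type[object]]) -> Optional[ConstraintsBase]:
    # Mirrors the diff: only proceed with a genuine ConstraintsBase subclass.
    if constraints_class is None or not issubclass(constraints_class, ConstraintsBase):
        return None
    return constraints_class()


print(build(FieldConstraints))  # a FieldConstraints instance
print(build(dict))              # None
print(build(None))              # None
```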
@@ -165,7 +167,7 @@ class DataModelFieldBase(_BaseModel):
         type_hint = self.type_hint
         has_union = not self.data_type.use_union_operator and UNION_PREFIX in type_hint
         imports: List[Union[Tuple[Import], Iterator[Import]]] = [
-            (
+            iter(
                 i
                 for i in self.data_type.all_imports
                 if not (not has_union and i == IMPORT_UNION)

@@ -229,7 +231,7 @@ class DataModelFieldBase(_BaseModel):
         return True


-@lru_cache
+@lru_cache
 def get_template(template_file_path: Path) -> Template:
     loader = FileSystemLoader(str(TEMPLATE_DIR / template_file_path.parent))
     environment: Environment = Environment(loader=loader)

@@ -251,7 +253,7 @@ def get_module_name(name: str, file_path: Optional[Path]) -> str:


 class TemplateBase(ABC):
-    @
+    @cached_property
     @abstractmethod
     def template_file_path(self) -> Path:
         raise NotImplementedError
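The last hunk stacks `cached_property` on top of an abstract method: the base class only declares the attribute, while each concrete template computes it once per instance. A runnable sketch of how a subclass then satisfies and caches it (class names and the template path are illustrative):

```python
from abc import ABC, abstractmethod
from functools import cached_property
from pathlib import Path


class TemplateBase(ABC):
    # The base class declares the property abstractly; accessing it here
    # would just raise NotImplementedError.
    @cached_property
    @abstractmethod
    def template_file_path(self) -> Path:
        raise NotImplementedError


class PydanticTemplate(TemplateBase):
    @cached_property
    def template_file_path(self) -> Path:
        print('computed once')
        return Path('pydantic/BaseModel.jinja2')


t = PydanticTemplate()
print(t.template_file_path)
print(t.template_file_path)  # cached: "computed once" is not printed again
```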
@@ -87,7 +87,7 @@ class Enum(DataModel):

         for field in self.fields:
             # Remove surrounding quotes from field default value
-            field_default = field.default.strip('\'"')
+            field_default = (field.default or '').strip('\'"')

             # Compare values after removing quotes
             if field_default == str_value:
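The enum fix makes quote-stripping safe when a field has no default. A tiny sketch of the `(value or '')` idiom it relies on:

```python
from typing import Optional


def normalize_default(default: Optional[str]) -> str:
    # (default or '') turns None into '' so .strip() never raises
    # AttributeError, matching the enum fix above.
    return (default or '').strip('\'"')


print(normalize_default("'active'"))  # active
print(normalize_default(None))        # ''
```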
@@ -322,4 +322,4 @@ class BaseModel(BaseModelBase):
         if config_parameters:
             from datamodel_code_generator.model.pydantic import Config

-            self.extra_template_data['config'] = Config.parse_obj(config_parameters)
+            self.extra_template_data['config'] = Config.parse_obj(config_parameters)  # pyright: ignore [reportArgumentType]
@@ -180,7 +180,7 @@ class DataTypeManager(_DataTypeManager):
             self.data_type,
             strict_types=self.strict_types,
             pattern_key=self.PATTERN_KEY,
-            target_datetime_class=target_datetime_class,
+            target_datetime_class=self.target_datetime_class,
         )
         self.strict_type_map: Dict[StrictTypes, DataType] = strict_type_map_factory(
             self.data_type,
@@ -98,7 +98,7 @@ class DataModelField(DataModelFieldV1):
         'max_length',
         'union_mode',
     }
-    constraints: Optional[Constraints] = None
+    constraints: Optional[Constraints] = None  # pyright: ignore [reportIncompatibleVariableOverride]
     _PARSE_METHOD: ClassVar[str] = 'model_validate'
     can_have_extra_keys: ClassVar[bool] = False

@@ -234,7 +234,7 @@ class BaseModel(BaseModelBase):
         if config_parameters:
             from datamodel_code_generator.model.pydantic_v2 import ConfigDict

-            self.extra_template_data['config'] = ConfigDict.parse_obj(config_parameters)
+            self.extra_template_data['config'] = ConfigDict.parse_obj(config_parameters)  # pyright: ignore [reportArgumentType]
             self._additional_imports.append(IMPORT_CONFIG_DICT)

     def _get_config_extra(self) -> Optional[Literal["'allow'", "'forbid'"]]:
@@ -24,7 +24,10 @@ class DataTypeManager(_DataTypeManager):
     ) -> Dict[Types, DataType]:
         result = {
             **super().type_map_factory(
-                data_type,
+                data_type,
+                strict_types,
+                pattern_key,
+                target_datetime_class or DatetimeClassType.Datetime,
             ),
             Types.hostname: self.data_type.from_import(
                 IMPORT_CONSTR,
@@ -411,7 +411,7 @@ class Parser(ABC):
         treat_dots_as_module: bool = False,
         use_exact_imports: bool = False,
         default_field_extras: Optional[Dict[str, Any]] = None,
-        target_datetime_class: DatetimeClassType = DatetimeClassType.Datetime,
+        target_datetime_class: Optional[DatetimeClassType] = DatetimeClassType.Datetime,
         keyword_only: bool = False,
         no_alias: bool = False,
     ) -> None:

@@ -668,7 +668,16 @@ class Parser(ABC):
         for model, duplicate_models in model_to_duplicate_models.items():
             for duplicate_model in duplicate_models:
                 for child in duplicate_model.reference.children[:]:
-                    child
+                    if isinstance(child, DataType):
+                        child.replace_reference(model.reference)
+                    # simplify if introduce duplicate base classes
+                    if isinstance(child, DataModel):
+                        child.base_classes = list(
+                            {
+                                f'{c.module_name}.{c.type_hint}': c
+                                for c in child.base_classes
+                            }.values()
+                        )
                 models.remove(duplicate_model)

     @classmethod
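The duplicate-model cleanup above deduplicates base classes by keying a dict comprehension on `module.TypeHint` and taking its values, which drops repeats while preserving the original order (dicts keep insertion order). A self-contained sketch of that technique:

```python
class BaseClassRef:
    def __init__(self, module_name: str, type_hint: str) -> None:
        self.module_name = module_name
        self.type_hint = type_hint

    def __repr__(self) -> str:
        return f'{self.module_name}.{self.type_hint}'


base_classes = [
    BaseClassRef('models.pet', 'Pet'),
    BaseClassRef('models.pet', 'Pet'),      # duplicate introduced by merging models
    BaseClassRef('models.base', 'Entity'),
]

# Keying by "module.TypeHint" drops the duplicate; insertion order survives.
deduplicated = list({f'{c.module_name}.{c.type_hint}': c for c in base_classes}.values())
print(deduplicated)  # [models.pet.Pet, models.base.Entity]
```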
@@ -849,12 +858,12 @@ class Parser(ABC):

         # Check the main discriminator model path
         if mapping:
-            check_paths(discriminator_model, mapping)
+            check_paths(discriminator_model, mapping)  # pyright: ignore [reportArgumentType]

         # Check the base_classes if they exist
         if len(type_names) == 0:
             for base_class in discriminator_model.base_classes:
-                check_paths(base_class.reference, mapping)
+                check_paths(base_class.reference, mapping)  # pyright: ignore [reportArgumentType]
         else:
             type_names = [discriminator_model.path.split('/')[-1]]
         if not type_names:  # pragma: no cover

@@ -1038,7 +1047,7 @@ class Parser(ABC):
                 and any(
                     d
                     for d in model_field.data_type.all_data_types
-                    if d.is_dict or d.is_union
+                    if d.is_dict or d.is_union or d.is_list
                 )
             ):
                 continue  # pragma: no cover

@@ -1061,7 +1070,7 @@ class Parser(ABC):

                     data_type.parent.data_type = copied_data_type

-                elif data_type.parent.is_list:
+                elif data_type.parent is not None and data_type.parent.is_list:
                     if self.field_constraints:
                         model_field.constraints = ConstraintsBase.merge_constraints(
                             root_type_field.constraints, model_field.constraints
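The `data_type.parent is not None and data_type.parent.is_list` change is the usual short-circuit guard against attribute access on an optional parent. A tiny sketch of the idiom:

```python
from typing import Optional


class Node:
    def __init__(self, is_list: bool, parent: Optional['Node'] = None) -> None:
        self.is_list = is_list
        self.parent = parent


def parent_is_list(node: Node) -> bool:
    # The is_list access only happens when a parent exists, so there is no
    # AttributeError on orphan nodes.
    return node.parent is not None and node.parent.is_list


root = Node(is_list=False)
child = Node(is_list=False, parent=Node(is_list=True))
print(parent_is_list(root))   # False
print(parent_is_list(child))  # True
```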
@@ -1073,6 +1082,7 @@ class Parser(ABC):
                     discriminator = root_type_field.extras.get('discriminator')
                     if discriminator:
                         model_field.extras['discriminator'] = discriminator
+                    assert isinstance(data_type.parent, DataType)
                     data_type.parent.data_types.remove(
                         data_type
                     )  # pragma: no cover

@@ -1358,7 +1368,7 @@ class Parser(ABC):
         module_to_import: Dict[Tuple[str, ...], Imports] = {}

         previous_module = ()  # type: Tuple[str, ...]
-        for module, models in ((k, [*v]) for k, v in grouped_models):
+        for module, models in ((k, [*v]) for k, v in grouped_models):
             for model in models:
                 model_to_module_models[model] = module, models
             self.__delete_duplicate_models(models)
@@ -373,7 +373,7 @@ class GraphQLParser(Parser):
     def parse_field(
         self,
         field_name: str,
-        alias: str,
+        alias: Optional[str],
         field: Union[graphql.GraphQLField, graphql.GraphQLInputField],
     ) -> DataModelFieldBase:
         final_data_type = DataType(

@@ -399,9 +399,9 @@ class GraphQLParser(Parser):
         elif graphql.is_non_null_type(obj):  # pragma: no cover
             data_type.is_optional = False

-            obj = obj.of_type
+            obj = obj.of_type  # pyright: ignore [reportAttributeAccessIssue]

-        data_type.type = obj.name
+        data_type.type = obj.name  # pyright: ignore [reportAttributeAccessIssue]

         required = (not self.force_optional_for_required_fields) and (
             not final_data_type.is_optional

@@ -456,7 +456,7 @@ class GraphQLParser(Parser):

         base_classes = []
         if hasattr(obj, 'interfaces'):  # pragma: no cover
-            base_classes = [self.references[i.name] for i in obj.interfaces]
+            base_classes = [self.references[i.name] for i in obj.interfaces]  # pyright: ignore [reportAttributeAccessIssue]

         data_model_type = self.data_model_type(
             reference=self.references[obj.name],
@@ -258,7 +258,7 @@ class JsonSchemaObject(BaseModel):
     extras: Dict[str, Any] = Field(alias=__extra_key__, default_factory=dict)
     discriminator: Union[Discriminator, str, None] = None
     if PYDANTIC_V2:
-        model_config = ConfigDict(
+        model_config = ConfigDict(  # pyright: ignore [reportPossiblyUnboundVariable]
             arbitrary_types_allowed=True,
             ignored_types=(cached_property,),
         )

@@ -320,7 +320,7 @@ class JsonSchemaObject(BaseModel):
         return isinstance(self.type, list) and 'null' in self.type


-@lru_cache
+@lru_cache
 def get_ref_type(ref: str) -> JSONReference:
     if ref[0] == '#':
         return JSONReference.LOCAL

@@ -360,7 +360,7 @@ EXCLUDE_FIELD_KEYS_IN_JSON_SCHEMA: Set[str] = {
 }

 EXCLUDE_FIELD_KEYS = (
-    set(JsonSchemaObject.get_fields())
+    set(JsonSchemaObject.get_fields())  # pyright: ignore [reportAttributeAccessIssue]
     - DEFAULT_FIELD_KEYS
     - EXCLUDE_FIELD_KEYS_IN_JSON_SCHEMA
 ) | {

@@ -528,7 +528,7 @@ class JsonSchemaParser(Parser):
         self.raw_obj: Dict[Any, Any] = {}
         self._root_id: Optional[str] = None
         self._root_id_base_path: Optional[str] = None
-        self.reserved_refs: DefaultDict[Tuple[str], Set[str]] = defaultdict(set)
+        self.reserved_refs: DefaultDict[Tuple[str, ...], Set[str]] = defaultdict(set)
         self.field_keys: Set[str] = {
             *DEFAULT_FIELD_KEYS,
             *self.field_extra_keys,
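The `reserved_refs` annotation fix swaps `Tuple[str]` (a 1-tuple) for `Tuple[str, ...]` (a tuple of any length), which is what split schema paths actually are. A brief sketch of the difference in the same `defaultdict` shape:

```python
from collections import defaultdict
from typing import DefaultDict, Set, Tuple

# Tuple[str] means "exactly one string"; Tuple[str, ...] means "any number of
# strings", matching keys like ('components', 'schemas', 'Pet').
reserved_refs: DefaultDict[Tuple[str, ...], Set[str]] = defaultdict(set)
reserved_refs[('components', 'schemas')].add('#/components/schemas/Pet')
reserved_refs[('definitions',)].add('#/definitions/User')
print(dict(reserved_refs))
```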
@@ -1798,6 +1798,7 @@ class JsonSchemaParser(Parser):
         root_obj = self.SCHEMA_OBJECT_TYPE.parse_obj(raw)
         self.parse_id(root_obj, path_parts)
         definitions: Optional[Dict[Any, Any]] = None
+        schema_path = ''
         for schema_path, split_schema_path in self.schema_paths:
             try:
                 definitions = get_model_by_path(raw, split_schema_path)
@@ -367,7 +367,7 @@ class OpenAPIParser(JsonSchemaParser):
         for (
             media_type,
             media_obj,
-        ) in request_body.content.items():
+        ) in request_body.content.items():
             if isinstance(media_obj.schema_, JsonSchemaObject):
                 self.parse_schema(name, media_obj.schema_, [*path, media_type])

@@ -400,11 +400,13 @@ class OpenAPIParser(JsonSchemaParser):
             if not object_schema:  # pragma: no cover
                 continue
             if isinstance(object_schema, JsonSchemaObject):
-                data_types[status_code][content_type] = self.parse_schema(
-                    name,
+                data_types[status_code][content_type] = self.parse_schema(  # pyright: ignore [reportArgumentType]
+                    name,
+                    object_schema,
+                    [*path, str(status_code), content_type],  # pyright: ignore [reportArgumentType]
                 )
             else:
-                data_types[status_code][content_type] = self.get_ref_data_type(
+                data_types[status_code][content_type] = self.get_ref_data_type(  # pyright: ignore [reportArgumentType]
                     object_schema.ref
                 )

@@ -604,7 +606,7 @@ class OpenAPIParser(JsonSchemaParser):
         for (
             obj_name,
             raw_obj,
-        ) in schemas.items():
+        ) in schemas.items():
             self.parse_raw_obj(
                 obj_name,
                 raw_obj,
@@ -19,4 +19,4 @@ def patched_evaluate_forwardref(

     # Patch only Python3.12
     if sys.version_info >= (3, 12):
-        pydantic.typing.evaluate_forwardref = patched_evaluate_forwardref
+        pydantic.typing.evaluate_forwardref = patched_evaluate_forwardref  # pyright: ignore [reportAttributeAccessIssue]
@@ -138,7 +138,7 @@ class Reference(_BaseModel):
     if PYDANTIC_V2:
         # TODO[pydantic]: The following keys were removed: `copy_on_model_validation`.
         # Check https://docs.pydantic.dev/dev-v2/migration/#changes-to-config for more information.
-        model_config = ConfigDict(
+        model_config = ConfigDict(  # pyright: ignore [reportAssignmentType]
             arbitrary_types_allowed=True,
             ignored_types=(cached_property,),
             revalidate_instances='never',

@@ -182,7 +182,7 @@ _UNDER_SCORE_1: Pattern[str] = re.compile(r'([^_])([A-Z][a-z]+)')
 _UNDER_SCORE_2: Pattern[str] = re.compile('([a-z0-9])([A-Z])')


-@lru_cache
+@lru_cache
 def camel_to_snake(string: str) -> str:
     subbed = _UNDER_SCORE_1.sub(r'\1_\2', string)
     return _UNDER_SCORE_2.sub(r'\1_\2', subbed).lower()

@@ -750,15 +750,15 @@ class ModelResolver:
     )


-@lru_cache
+@lru_cache
 def get_singular_name(name: str, suffix: str = SINGULAR_NAME_SUFFIX) -> str:
     singular_name = inflect_engine.singular_noun(name)
     if singular_name is False:
         singular_name = f'{name}{suffix}'
-    return singular_name
+    return singular_name  # pyright: ignore [reportReturnType]

-@lru_cache
+@lru_cache
 def snake_to_upper_camel(word: str, delimiter: str = '_') -> str:
     prefix = ''
     if word.startswith(delimiter):
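`get_singular_name` relies on inflect's `singular_noun`, which returns the singular form or `False` when the word is not recognised as a plural; the `False` branch is why the suffix fallback (and the new return-type ignore) exists. A self-contained sketch, assuming the `inflect` package is installed (it is a dependency of this project) and using an illustrative suffix value:

```python
import inflect

inflect_engine = inflect.engine()


def get_singular_name(name: str, suffix: str = 'Item') -> str:
    # inflect returns False for words it does not treat as plurals, so the
    # generator falls back to appending a suffix.
    singular_name = inflect_engine.singular_noun(name)
    if singular_name is False:
        singular_name = f'{name}{suffix}'
    return singular_name


print(get_singular_name('Pets'))  # Pet
print(get_singular_name('Pet'))   # PetItem (already singular, so fallback applies)
```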
@@ -114,25 +114,25 @@ class UnionIntFloat:
     def __get_pydantic_core_schema__(
         cls, _source_type: Any, _handler: 'GetCoreSchemaHandler'
     ) -> 'core_schema.CoreSchema':
-        from_int_schema = core_schema.chain_schema(
+        from_int_schema = core_schema.chain_schema(  # pyright: ignore [reportPossiblyUnboundVariable]
             [
-                core_schema.union_schema(
-                    [core_schema.int_schema(), core_schema.float_schema()]
+                core_schema.union_schema(  # pyright: ignore [reportPossiblyUnboundVariable]
+                    [core_schema.int_schema(), core_schema.float_schema()]  # pyright: ignore [reportPossiblyUnboundVariable]
                 ),
-                core_schema.no_info_plain_validator_function(cls.validate),
+                core_schema.no_info_plain_validator_function(cls.validate),  # pyright: ignore [reportPossiblyUnboundVariable]
             ]
         )

-        return core_schema.json_or_python_schema(
+        return core_schema.json_or_python_schema(  # pyright: ignore [reportPossiblyUnboundVariable]
             json_schema=from_int_schema,
-            python_schema=core_schema.union_schema(
+            python_schema=core_schema.union_schema(  # pyright: ignore [reportPossiblyUnboundVariable]
                 [
                     # check if it's an instance first before doing any further work
-                    core_schema.is_instance_schema(UnionIntFloat),
+                    core_schema.is_instance_schema(UnionIntFloat),  # pyright: ignore [reportPossiblyUnboundVariable]
                     from_int_schema,
                 ]
             ),
-            serialization=core_schema.plain_serializer_function_ser_schema(
+            serialization=core_schema.plain_serializer_function_ser_schema(  # pyright: ignore [reportPossiblyUnboundVariable]
                 lambda instance: instance.value
             ),
         )

@@ -161,7 +161,7 @@ def chain_as_tuple(*iterables: Iterable[T]) -> Tuple[T, ...]:
     return tuple(chain(*iterables))


-@lru_cache
+@lru_cache
 def _remove_none_from_type(
     type_: str, split_pattern: Pattern[str], delimiter: str
 ) -> List[str]:

@@ -207,7 +207,7 @@ def _remove_none_from_union(type_: str, use_union_operator: bool) -> str:
     return f'{UNION_PREFIX}{UNION_DELIMITER.join(inner_types)}]'


-@lru_cache
+@lru_cache
 def get_optional_type(type_: str, use_union_operator: bool) -> str:
     type_ = _remove_none_from_union(type_, use_union_operator)

@@ -236,7 +236,7 @@ class DataType(_BaseModel):
     if PYDANTIC_V2:
         # TODO[pydantic]: The following keys were removed: `copy_on_model_validation`.
         # Check https://docs.pydantic.dev/dev-v2/migration/#changes-to-config for more information.
-        model_config = ConfigDict(
+        model_config = ConfigDict(  # pyright: ignore [reportAssignmentType]
             extra='forbid',
             revalidate_instances='never',
         )

@@ -588,7 +588,9 @@ class DataTypeManager(ABC):
         )
         self.use_union_operator: bool = use_union_operator
         self.use_pendulum: bool = use_pendulum
-        self.target_datetime_class: DatetimeClassType =
+        self.target_datetime_class: DatetimeClassType = (
+            target_datetime_class or DatetimeClassType.Datetime
+        )

         if (
             use_generic_container_types and python_version == PythonVersion.PY_36
datamodel_code_generator/util.py  CHANGED

@@ -37,17 +37,13 @@ else:
     from yaml import SafeLoader

 try:
-    import
-
-    def load_toml(path: Path) -> Dict[str, Any]:
-        with path.open('rb') as f:
-            return tomllib.load(f)
-
+    from tomllib import load as load_tomllib
 except ImportError:
-    import
+    from tomli import load as load_tomllib

-
-
+def load_toml(path: Path) -> Dict[str, Any]:
+    with path.open('rb') as f:
+        return load_tomllib(f)


 SafeLoaderTemp = copy.deepcopy(SafeLoader)
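The rewritten loader binds a single `load_tomllib` name from the standard-library `tomllib` (Python 3.11+) or the `tomli` backport, and defines `load_toml` once outside the try/except. A self-contained version of the same pattern; running it assumes a `pyproject.toml` in the current directory and, on Python < 3.11, that `tomli` is installed:

```python
from pathlib import Path
from typing import Any, Dict

try:
    from tomllib import load as load_tomllib  # Python 3.11+
except ImportError:
    from tomli import load as load_tomllib  # backport for older interpreters


def load_toml(path: Path) -> Dict[str, Any]:
    # Both loaders require a binary file object.
    with path.open('rb') as f:
        return load_tomllib(f)


if __name__ == '__main__':
    print(load_toml(Path('pyproject.toml')))
```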
@@ -81,7 +77,7 @@ def field_validator(
     field_name: str,
     *fields: str,
     mode: Literal['before', 'after'] = 'after',
-) -> Callable[[Any], Callable[[
+) -> Callable[[Any], Callable[[BaseModel, Any], Any]]:
     def inner(method: Callable[[Model, Any], Any]) -> Callable[[Model, Any], Any]:
         if PYDANTIC_V2:
             from pydantic import field_validator as field_validator_v2

@@ -103,4 +99,4 @@ else:

 class BaseModel(_BaseModel):
     if PYDANTIC_V2:
-        model_config = ConfigDict(strict=False)
+        model_config = ConfigDict(strict=False)  # pyright: ignore [reportAssignmentType]
{datamodel_code_generator-0.26.5.dist-info → datamodel_code_generator-0.27.1.dist-info}/METADATA  RENAMED

@@ -1,16 +1,16 @@
-Metadata-Version: 2.
+Metadata-Version: 2.4
 Name: datamodel-code-generator
-Version: 0.26.5
+Version: 0.27.1
 Summary: Datamodel Code Generator
-
-
-Author: Koudai Aono
-
-
+Project-URL: Homepage, https://github.com/koxudaxi/datamodel-code-generator
+Project-URL: Source, https://github.com/koxudaxi/datamodel-code-generator
+Author-email: Koudai Aono <koxudaxi@gmail.com>
+License-Expression: MIT
+License-File: LICENSE
 Classifier: Development Status :: 4 - Beta
 Classifier: License :: OSI Approved :: MIT License
 Classifier: Natural Language :: English
-Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3 :: Only
 Classifier: Programming Language :: Python :: 3.8
 Classifier: Programming Language :: Python :: 3.9
 Classifier: Programming Language :: Python :: 3.10
@@ -18,29 +18,31 @@ Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Classifier: Programming Language :: Python :: 3.13
 Classifier: Programming Language :: Python :: Implementation :: CPython
+Requires-Python: >=3.8
+Requires-Dist: argcomplete<4,>=2.10.1
+Requires-Dist: black>=19.10b0
+Requires-Dist: genson<2,>=1.2.1
+Requires-Dist: graphql-core>=3.2.3
+Requires-Dist: httpx>=0.24.1
+Requires-Dist: inflect<6,>=4.1
+Requires-Dist: isort<7,>=4.3.21
+Requires-Dist: jinja2<4,>=2.10.1
+Requires-Dist: openapi-spec-validator<0.7,>=0.2.8
+Requires-Dist: packaging
+Requires-Dist: prance>=0.18.2
+Requires-Dist: pydantic>=1.5
+Requires-Dist: pysnooper<2,>=0.4.1
+Requires-Dist: pyyaml>=6.0.1
+Requires-Dist: tomli<3,>=2.2.1; python_version <= '3.11'
 Provides-Extra: debug
+Requires-Dist: pysnooper; extra == 'debug'
 Provides-Extra: graphql
+Requires-Dist: graphql-core; extra == 'graphql'
 Provides-Extra: http
+Requires-Dist: httpx; extra == 'http'
 Provides-Extra: validation
-Requires-Dist:
-Requires-Dist:
-Requires-Dist: black (>=19.10b0)
-Requires-Dist: genson (>=1.2.1,<2.0)
-Requires-Dist: graphql-core (>=3.2.3,<4.0.0) ; extra == "graphql"
-Requires-Dist: httpx ; extra == "http"
-Requires-Dist: inflect (>=4.1.0,<6.0)
-Requires-Dist: isort (>=4.3.21,<6.0)
-Requires-Dist: jinja2 (>=2.10.1,<4.0)
-Requires-Dist: openapi-spec-validator (>=0.2.8,<0.7.0) ; extra == "validation"
-Requires-Dist: packaging
-Requires-Dist: prance (>=0.18.2) ; extra == "validation"
-Requires-Dist: pydantic[email] (>=1.10.0,!=2.0.0,!=2.0.1,<3.0,!=2.4.0) ; python_version >= "3.12" and python_version < "4.0"
-Requires-Dist: pydantic[email] (>=1.10.0,<3.0,!=2.4.0) ; python_version >= "3.11" and python_version < "4.0"
-Requires-Dist: pydantic[email] (>=1.5.1,<3.0,!=2.4.0) ; python_version < "3.10"
-Requires-Dist: pydantic[email] (>=1.9.0,<3.0,!=2.4.0) ; python_version >= "3.10" and python_version < "3.11"
-Requires-Dist: pyyaml (>=6.0.1)
-Requires-Dist: toml (>=0.10.0,<1.0.0) ; python_version < "3.11"
-Project-URL: Repository, https://github.com/koxudaxi/datamodel-code-generator
+Requires-Dist: openapi-spec-validator; extra == 'validation'
+Requires-Dist: prance; extra == 'validation'
 Description-Content-Type: text/markdown

 # datamodel-code-generator
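The dependency metadata above changes from parenthesised, heavily conditioned pins to Metadata-Version 2.4 entries with extras-scoped optional requirements (debug, graphql, http, validation). If you want to see what your installed copy declares, the standard-library `importlib.metadata` exposes the same fields; the sketch below assumes datamodel-code-generator is installed in the current environment:

```python
from importlib.metadata import metadata, version

meta = metadata('datamodel-code-generator')
print(version('datamodel-code-generator'))     # e.g. 0.27.1
print(meta.get_all('Provides-Extra'))          # extras such as debug, graphql, http, validation
print(meta.get_all('Requires-Dist'))           # the Requires-Dist entries shown in the diff
```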
@@ -331,6 +333,8 @@ See the following linked projects for real world examples and inspiration.
   - *[`Makefile`](https://github.com/argoproj-labs/hera/blob/c8cbf0c7a676de57469ca3d6aeacde7a5e84f8b7/Makefile#L53-L62)*
 - [awslabs/aws-lambda-powertools-python](https://github.com/awslabs/aws-lambda-powertools-python)
   - *Recommended for [advanced-use-cases](https://awslabs.github.io/aws-lambda-powertools-python/2.6.0/utilities/parser/#advanced-use-cases) in the official documentation*
+- [cloudcoil/cloudcoil](https://github.com/cloudcoil/cloudcoil)
+  - *[Cloudcoil - Model generation](https://github.com/cloudcoil/cloudcoil#%EF%B8%8F-model-generation)
 - [DataDog/integrations-core](https://github.com/DataDog/integrations-core)
   - *[Config models](https://github.com/DataDog/integrations-core/blob/master/docs/developer/meta/config-models.md)*
 - [hashintel/hash](https://github.com/hashintel/hash)

@@ -577,4 +581,3 @@ See `docs/development-contributing.md` for how to get started!
 ## License

 datamodel-code-generator is released under the MIT License. http://www.opensource.org/licenses/mit-license
-
{datamodel_code_generator-0.26.5.dist-info → datamodel_code_generator-0.27.1.dist-info}/RECORD  RENAMED

@@ -1,28 +1,36 @@
-datamodel_code_generator/__init__.py,sha256=
-datamodel_code_generator/__main__.py,sha256=
+datamodel_code_generator/__init__.py,sha256=3PRzUWeQD0qxsojEAidP65pv0NgYlGA3QZ46ZR1yoiE,20226
+datamodel_code_generator/__main__.py,sha256=pul_cXj36i_qxK86dbOjpSf5W_6I6Febf_UEgq53fyo,21990
 datamodel_code_generator/arguments.py,sha256=jf5LnhDl6LnCqRs05iAzvnUwt3bFnfUA43PIbv1xhks,16306
 datamodel_code_generator/format.py,sha256=M2lag7AeB4eIHaTORu1A_RzMdIflINbypoeqsEYEEGY,8904
-datamodel_code_generator/http.py,sha256=
-datamodel_code_generator/imports.py,sha256=
-datamodel_code_generator/
-datamodel_code_generator/
+datamodel_code_generator/http.py,sha256=9TkK7um8DgHnjPDMSwoGQdbEDmWcmx0j0eeLgmfo9Vc,790
+datamodel_code_generator/imports.py,sha256=EjeVsxdyEBK4U7a5mn2RrECHt5URJEZKL46Qiwdc0jo,5726
+datamodel_code_generator/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+datamodel_code_generator/pydantic_patch.py,sha256=OJz79KF1hvZ0oDMsmSaA1Qh0XfPEDq0yBexAyEsHbL0,618
+datamodel_code_generator/reference.py,sha256=BsKVIKGZrLAZHVzc30RfiibYH3qwXk8-7OGtu1MVXyk,26654
+datamodel_code_generator/types.py,sha256=xVXM61xqgYFk1i6DBNhYJp36Z3S-aXuUW2W8j2w5ass,20396
+datamodel_code_generator/util.py,sha256=r07oxWGC8Da6HaT-_wI6fBW9NG834IIZgyy_z2Z-2zc,2826
+datamodel_code_generator/model/__init__.py,sha256=VpIso4gfS7rD6mr2bu2RAbsijt4WCwMRFSocUKd20gA,3599
+datamodel_code_generator/model/base.py,sha256=dEMsUSwosHiQTchy37K_EVLJZut6WPHTw_Jb3X4L4mM,14797
 datamodel_code_generator/model/dataclass.py,sha256=Ebn48PRvCOCcyKhxYxgYBbRcpIvXko-VomZ6N8gKrLA,5871
-datamodel_code_generator/model/enum.py,sha256=
+datamodel_code_generator/model/enum.py,sha256=4sSOd7I-57YLdY3hnOZH_o3Cgai-UphO9RvoOUe5s1o,3843
 datamodel_code_generator/model/imports.py,sha256=9-JLfcilbRz9LI4Q9_YAVpRdIusULBaLsMhHE_6j0-w,784
 datamodel_code_generator/model/msgspec.py,sha256=TevwsJDtgEzlpd7TvIpcMZ1HGw6gwLkm6yR86b_w8fY,11514
+datamodel_code_generator/model/rootmodel.py,sha256=8bW7emVQtDGe2iUAmqtlQb607LvTRL0TBSP66pIeNzY,202
+datamodel_code_generator/model/scalar.py,sha256=md13JdHahWqjuASbivhywvy4MQ8sZhCAaClhEvWrY3M,2596
+datamodel_code_generator/model/typed_dict.py,sha256=W1r3NRy8uFkYe3YVnjL9PAGZdGyoSDcd-Otq7cxFDMM,4792
+datamodel_code_generator/model/types.py,sha256=T3Xxa7MToHXIH1zXHT1P6PzE49aah0IhnwkCbYVc79c,3157
+datamodel_code_generator/model/union.py,sha256=4LT5E46c2OH1dvQmvRWM7mX1Pziu_oWBHwXsGsylUbY,1791
 datamodel_code_generator/model/pydantic/__init__.py,sha256=AYMjDCtnV4vweYqe1asTRCYdOo8IGLBhd8pEdxyY8ok,1372
-datamodel_code_generator/model/pydantic/base_model.py,sha256=
+datamodel_code_generator/model/pydantic/base_model.py,sha256=DlwdmDftlnygHs_BWSzK4YqfO3A6iygeDRWZJJpVxRg,12160
 datamodel_code_generator/model/pydantic/custom_root_type.py,sha256=XOeJqzUEAYE21C3hPAnRIz9iDWIjZvUOWDc9MCrpdvw,299
 datamodel_code_generator/model/pydantic/dataclass.py,sha256=sbqTmutl8Fjf1pYngfdv0NMXt904QcTRpHqmZy6GUiQ,424
 datamodel_code_generator/model/pydantic/imports.py,sha256=2nSLYwphBUMQEa0PTSNwoLjEBslu02EQb6BdZ-S51yk,2189
-datamodel_code_generator/model/pydantic/types.py,sha256=
+datamodel_code_generator/model/pydantic/types.py,sha256=zWDeJpB3f3dIpAAtTS0UqIqUJSlm_ZU4bAmSnzBcFH8,13612
 datamodel_code_generator/model/pydantic_v2/__init__.py,sha256=PWG0jyOaAIgaoPNKyko2-wihSOwzlkRAyEriagl09Cc,1018
-datamodel_code_generator/model/pydantic_v2/base_model.py,sha256=
+datamodel_code_generator/model/pydantic_v2/base_model.py,sha256=87vOqeHr1WfRwpwGpspy16IILAl3dizJCFqV81Ql5h4,8129
 datamodel_code_generator/model/pydantic_v2/imports.py,sha256=n0yWg5QGDDzOseN35RJFlEbmV6oKMLQ8Kju1w4N6D08,263
 datamodel_code_generator/model/pydantic_v2/root_model.py,sha256=iApUz1uGe4hHV8RyOK8rGjaEJcnqTJZqo-0uSfyVMGc,884
-datamodel_code_generator/model/pydantic_v2/types.py,sha256=
-datamodel_code_generator/model/rootmodel.py,sha256=8bW7emVQtDGe2iUAmqtlQb607LvTRL0TBSP66pIeNzY,202
-datamodel_code_generator/model/scalar.py,sha256=md13JdHahWqjuASbivhywvy4MQ8sZhCAaClhEvWrY3M,2596
+datamodel_code_generator/model/pydantic_v2/types.py,sha256=Y23PAyPxIG1-xmD1dc5HMMoSAaJ6J7V6iqZ_P7_huaw,2000
 datamodel_code_generator/model/template/Enum.jinja2,sha256=k9lB8iQUsB94bPi8e3xJEd0AGk2ciWL-pSZuGY5kNPQ,378
 datamodel_code_generator/model/template/Scalar.jinja2,sha256=Ss22-mYG3Vez-pbqmW2zFzwxGVhXkbQcAVTMV7POpg8,104
 datamodel_code_generator/model/template/TypedDict.jinja2,sha256=J_Pe_CiuvTOb-EUCExXPaeTEFzn2keyrKB0wglZ8HgA,135

@@ -31,6 +39,7 @@ datamodel_code_generator/model/template/TypedDictFunction.jinja2,sha256=KjSij5_w
 datamodel_code_generator/model/template/Union.jinja2,sha256=sq7o--2ESUSfIL4kCfgnr5ZXPFa_VeioqbATTY-N-5I,258
 datamodel_code_generator/model/template/dataclass.jinja2,sha256=wRSy2g11Dr1GN9YUl13OZt2xg37bQyFwKn2wEsQIndE,865
 datamodel_code_generator/model/template/msgspec.jinja2,sha256=qMuFOH6SFFh558wImdI6uIjG4Mtam3J_ox8Hmgqkv0g,1174
+datamodel_code_generator/model/template/root.jinja2,sha256=3OTtibxLcGA-FMdR0QDCJUJQgf_kRW0OafeCTPFSFFo,162
 datamodel_code_generator/model/template/pydantic/BaseModel.jinja2,sha256=sYZa-47YAXqZrd5cYKVnPrsbDvLkHEJOUd7M0nAosP8,1084
 datamodel_code_generator/model/template/pydantic/BaseModel_root.jinja2,sha256=WDdTXYNTrkIw-B4OvPVxOaETTknLs0zdNuq_iDQ2Bcw,1000
 datamodel_code_generator/model/template/pydantic/Config.jinja2,sha256=Ik028qdqQhDfEP207TCbwVv2b5Do1-nRNDPKzBHKzwM,135

@@ -38,23 +47,13 @@ datamodel_code_generator/model/template/pydantic/dataclass.jinja2,sha256=hM4OZTV
 datamodel_code_generator/model/template/pydantic_v2/BaseModel.jinja2,sha256=XdSCvA0hSdjkMtI9CA3M-2xBgieCOV-sWIfQvJPnJ4I,1119
 datamodel_code_generator/model/template/pydantic_v2/ConfigDict.jinja2,sha256=xHvBYrh__32O1xRCSl6_u5zbyYIjB8a5k8fZiTo0spY,149
 datamodel_code_generator/model/template/pydantic_v2/RootModel.jinja2,sha256=XQBlML7Hm5hN6_AExENNvVc_yxNWijcIfTTbbmegCpE,1223
-datamodel_code_generator/model/template/root.jinja2,sha256=3OTtibxLcGA-FMdR0QDCJUJQgf_kRW0OafeCTPFSFFo,162
-datamodel_code_generator/model/typed_dict.py,sha256=W1r3NRy8uFkYe3YVnjL9PAGZdGyoSDcd-Otq7cxFDMM,4792
-datamodel_code_generator/model/types.py,sha256=T3Xxa7MToHXIH1zXHT1P6PzE49aah0IhnwkCbYVc79c,3157
-datamodel_code_generator/model/union.py,sha256=4LT5E46c2OH1dvQmvRWM7mX1Pziu_oWBHwXsGsylUbY,1791
 datamodel_code_generator/parser/__init__.py,sha256=zHbw6RPlJC0SAQjb-XyVlyZhcOu5PfYgPidy6jlUM8M,793
-datamodel_code_generator/parser/base.py,sha256=
-datamodel_code_generator/parser/graphql.py,sha256=
-datamodel_code_generator/parser/jsonschema.py,sha256=
-datamodel_code_generator/parser/openapi.py,sha256=
-datamodel_code_generator/
-datamodel_code_generator/
-datamodel_code_generator/
-datamodel_code_generator/
-datamodel_code_generator/
-datamodel_code_generator/version.py,sha256=TNxw8_M4v5PH0eredAbnMHbocjoh_Mh-PTaI4X2JiFY,24
-datamodel_code_generator-0.26.5.dist-info/LICENSE,sha256=K54Lwc6_jduycsy8oFFjQEeSSuEiqvVIjCGIXOMnuTQ,1068
-datamodel_code_generator-0.26.5.dist-info/METADATA,sha256=1j7kIzH6carcuQiFE5nMo45gOd8G4VNaL5p_hsu9ZS8,24960
-datamodel_code_generator-0.26.5.dist-info/WHEEL,sha256=Nq82e9rUAnEjt98J6MlVmMCZb-t9cYE2Ir1kpBmnWfs,88
-datamodel_code_generator-0.26.5.dist-info/entry_points.txt,sha256=bykbUWqOCiKfxJPGe8jpNqTqD1NG7uyRmozdnwzu7rk,76
-datamodel_code_generator-0.26.5.dist-info/RECORD,,
+datamodel_code_generator/parser/base.py,sha256=cqSN9MWNz4Cqj2aWgDmCJmutf2XiCvHuhUXMicwuffQ,62601
+datamodel_code_generator/parser/graphql.py,sha256=vg0tiKsZFRKL_AEbhISXHALr7yqDeiMVZZoqiKC51zA,22571
+datamodel_code_generator/parser/jsonschema.py,sha256=7sCvtoRXuqe4xMPb0jydfNHx4WteBl2YXxUUj7KYoEI,71013
+datamodel_code_generator/parser/openapi.py,sha256=Rqp2OPzrZYUE5-aG4phuQDDkm8efK7pWTcW1jJR1JtY,26780
+datamodel_code_generator-0.27.1.dist-info/METADATA,sha256=_2aObqSLUMx4VRHVdHS1yPyVuYWtFUf8rHbVOAPPFIY,24844
+datamodel_code_generator-0.27.1.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+datamodel_code_generator-0.27.1.dist-info/entry_points.txt,sha256=cJVcHiEViQMANaoM5C1xR5hzmyCqH6hHHMpV8W00in8,77
+datamodel_code_generator-0.27.1.dist-info/licenses/LICENSE,sha256=K54Lwc6_jduycsy8oFFjQEeSSuEiqvVIjCGIXOMnuTQ,1068
+datamodel_code_generator-0.27.1.dist-info/RECORD,,
datamodel_code_generator/version.py  DELETED

@@ -1 +0,0 @@
-version: str = '0.26.5'

{datamodel_code_generator-0.26.5.dist-info → datamodel_code_generator-0.27.1.dist-info/licenses}/LICENSE: File without changes