datamodel-code-generator 0.25.7__py3-none-any.whl → 0.25.9__py3-none-any.whl

This diff shows the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of datamodel-code-generator has been flagged as potentially problematic; consult the package registry's advisory page for more details.

@@ -29,6 +29,7 @@ from urllib.parse import ParseResult
29
29
 
30
30
  import yaml
31
31
 
32
+ import datamodel_code_generator.pydantic_patch # noqa: F401
32
33
  from datamodel_code_generator.format import PythonVersion
33
34
  from datamodel_code_generator.parser import DefaultPutDict, LiteralType
34
35
  from datamodel_code_generator.parser.base import Parser
@@ -301,6 +302,8 @@ def generate(
301
302
  custom_formatters_kwargs: Optional[Dict[str, Any]] = None,
302
303
  use_pendulum: bool = False,
303
304
  http_query_parameters: Optional[Sequence[Tuple[str, str]]] = None,
305
+ treat_dots_as_module: bool = False,
306
+ use_exact_imports: bool = False,
304
307
  ) -> None:
305
308
  remote_text_cache: DefaultPutDict[str, str] = DefaultPutDict()
306
309
  if isinstance(input_, str):
@@ -461,6 +464,8 @@ def generate(
461
464
  custom_formatters_kwargs=custom_formatters_kwargs,
462
465
  use_pendulum=use_pendulum,
463
466
  http_query_parameters=http_query_parameters,
467
+ treat_dots_as_module=treat_dots_as_module,
468
+ use_exact_imports=use_exact_imports,
464
469
  **kwargs,
465
470
  )
466
471
 
@@ -218,16 +218,12 @@ class Config(BaseModel):
218
218
  def validate_additional_imports(cls, values: Dict[str, Any]) -> Dict[str, Any]:
219
219
  if values.get('additional_imports') is not None:
220
220
  values['additional_imports'] = values.get('additional_imports').split(',')
221
- else:
222
- values['additional_imports'] = []
223
221
  return values
224
222
 
225
223
  @model_validator(mode='before')
226
224
  def validate_custom_formatters(cls, values: Dict[str, Any]) -> Dict[str, Any]:
227
225
  if values.get('custom_formatters') is not None:
228
226
  values['custom_formatters'] = values.get('custom_formatters').split(',')
229
- else:
230
- values['custom_formatters'] = []
231
227
  return values
232
228
 
233
229
  if PYDANTIC_V2:
@@ -312,6 +308,8 @@ class Config(BaseModel):
312
308
  custom_formatters_kwargs: Optional[TextIOBase] = None
313
309
  use_pendulum: bool = False
314
310
  http_query_parameters: Optional[Sequence[Tuple[str, str]]] = None
311
+ treat_dot_as_module: bool = False
312
+ use_exact_imports: bool = False
315
313
 
316
314
  def merge_args(self, args: Namespace) -> None:
317
315
  set_args = {
@@ -427,7 +425,7 @@ def main(args: Optional[Sequence[str]] = None) -> Exit:
427
425
  with config.custom_formatters_kwargs as data:
428
426
  try:
429
427
  custom_formatters_kwargs = json.load(data)
430
- except json.JSONDecodeError as e:
428
+ except json.JSONDecodeError as e: # pragma: no cover
431
429
  print(
432
430
  f'Unable to load custom_formatters_kwargs mapping: {e}',
433
431
  file=sys.stderr,
@@ -436,7 +434,7 @@ def main(args: Optional[Sequence[str]] = None) -> Exit:
436
434
  if not isinstance(custom_formatters_kwargs, dict) or not all(
437
435
  isinstance(k, str) and isinstance(v, str)
438
436
  for k, v in custom_formatters_kwargs.items()
439
- ):
437
+ ): # pragma: no cover
440
438
  print(
441
439
  'Custom formatters kwargs mapping must be a JSON string mapping (e.g. {"from": "to", ...})',
442
440
  file=sys.stderr,
@@ -508,6 +506,8 @@ def main(args: Optional[Sequence[str]] = None) -> Exit:
508
506
  custom_formatters_kwargs=custom_formatters_kwargs,
509
507
  use_pendulum=config.use_pendulum,
510
508
  http_query_parameters=config.http_query_parameters,
509
+ treat_dots_as_module=config.treat_dot_as_module,
510
+ use_exact_imports=config.use_exact_imports,
511
511
  )
512
512
  return Exit.OK
513
513
  except InvalidClassNameError as e:
@@ -116,7 +116,7 @@ model_options.add_argument(
116
116
  '--collapse-root-models',
117
117
  action='store_true',
118
118
  default=None,
119
- help='Models generated with a root-type field will be merged'
119
+ help='Models generated with a root-type field will be merged '
120
120
  'into the models using that root-type model',
121
121
  )
122
122
  model_options.add_argument(
@@ -160,6 +160,12 @@ model_options.add_argument(
160
160
  help='target python version (default: 3.7)',
161
161
  choices=[v.value for v in PythonVersion],
162
162
  )
163
+ model_options.add_argument(
164
+ '--treat-dot-as-module',
165
+ help='treat dotted module names as modules',
166
+ action='store_true',
167
+ default=False,
168
+ )
163
169
  model_options.add_argument(
164
170
  '--use-schema-description',
165
171
  help='Use schema description to populate class docstring',
@@ -178,6 +184,13 @@ model_options.add_argument(
178
184
  action='store_true',
179
185
  default=False,
180
186
  )
187
+ model_options.add_argument(
188
+ '--use-exact-imports',
189
+ help='import exact types instead of modules, for example: "from .foo import Bar" instead of '
190
+ '"from . import foo" with "foo.Bar"',
191
+ action='store_true',
192
+ default=False,
193
+ )
181
194
 
182
195
  # ======================================================================================
183
196
  # Typing options for generated models
@@ -26,11 +26,12 @@ class Imports(DefaultDict[Optional[str], Set[str]]):
26
26
  def __str__(self) -> str:
27
27
  return self.dump()
28
28
 
29
- def __init__(self) -> None:
29
+ def __init__(self, use_exact: bool = False) -> None:
30
30
  super().__init__(set)
31
31
  self.alias: DefaultDict[Optional[str], Dict[str, str]] = defaultdict(dict)
32
32
  self.counter: Dict[Tuple[Optional[str], str], int] = defaultdict(int)
33
33
  self.reference_paths: Dict[str, Import] = {}
34
+ self.use_exact: bool = use_exact
34
35
 
35
36
  def _set_alias(self, from_: Optional[str], imports: Set[str]) -> List[str]:
36
37
  return [
@@ -76,7 +76,7 @@ class ConstraintsBase(_BaseModel):
76
76
  }
77
77
  constraints_class = a.__class__
78
78
  else:
79
- root_type_field_constraints = {}
79
+ root_type_field_constraints = {} # pragma: no cover
80
80
 
81
81
  if isinstance(b, ConstraintsBase): # pragma: no cover
82
82
  model_field_constraints = {
@@ -86,7 +86,7 @@ class ConstraintsBase(_BaseModel):
86
86
  else:
87
87
  model_field_constraints = {}
88
88
 
89
- if not issubclass(constraints_class, ConstraintsBase):
89
+ if not issubclass(constraints_class, ConstraintsBase): # pragma: no cover
90
90
  return None
91
91
 
92
92
  return constraints_class.parse_obj(
@@ -33,7 +33,7 @@ if TYPE_CHECKING:
33
33
  else:
34
34
  try:
35
35
  from typing import Literal
36
- except ImportError:
36
+ except ImportError: # pragma: no cover
37
37
  from typing_extensions import Literal
38
38
 
39
39
 
@@ -106,7 +106,7 @@ class DataModelField(DataModelFieldV1):
106
106
 
107
107
  @field_validator('extras')
108
108
  def validate_extras(cls, values: Any) -> Dict[str, Any]:
109
- if not isinstance(values, dict):
109
+ if not isinstance(values, dict): # pragma: no cover
110
110
  return values
111
111
  if 'examples' in values:
112
112
  return values
@@ -146,7 +146,7 @@ class DataModelField(DataModelFieldV1):
146
146
  self, field_arguments: List[str]
147
147
  ) -> List[str]:
148
148
  if not self.required or self.const:
149
- if self.use_default_kwarg:
149
+ if self.use_default_kwarg: # pragma: no cover
150
150
  return [
151
151
  f'default={repr(self.default)}',
152
152
  *field_arguments,
@@ -215,7 +215,7 @@ class BaseModel(BaseModelBase):
215
215
  else self.extra_template_data[from_]
216
216
  )
217
217
  for data_type in self.all_data_types:
218
- if data_type.is_custom_type:
218
+ if data_type.is_custom_type: # pragma: no cover
219
219
  config_parameters['arbitrary_types_allowed'] = True
220
220
  break
221
221
 
@@ -238,6 +238,14 @@ def relative(current_module: str, reference: str) -> Tuple[str, str]:
238
238
  return left, right
239
239
 
240
240
 
241
+ def exact_import(from_: str, import_: str, short_name: str) -> Tuple[str, str]:
242
+ if from_ == '.':
243
+ # Prevents "from . import foo" becoming "from ..foo import Foo"
244
+ # when our imported module has the same parent
245
+ return f'.{import_}', short_name
246
+ return f'{from_}.{import_}', short_name
247
+
248
+
241
249
  @runtime_checkable
242
250
  class Child(Protocol):
243
251
  @property
@@ -295,7 +303,7 @@ def _copy_data_types(data_types: List[DataType]) -> List[DataType]:
295
303
  copied_data_types.append(
296
304
  data_type_.__class__(reference=data_type_.reference)
297
305
  )
298
- elif data_type_.data_types:
306
+ elif data_type_.data_types: # pragma: no cover
299
307
  copied_data_type = data_type_.copy()
300
308
  copied_data_type.data_types = _copy_data_types(data_type_.data_types)
301
309
  copied_data_types.append(copied_data_type)
@@ -392,6 +400,8 @@ class Parser(ABC):
392
400
  custom_formatters_kwargs: Optional[Dict[str, Any]] = None,
393
401
  use_pendulum: bool = False,
394
402
  http_query_parameters: Optional[Sequence[Tuple[str, str]]] = None,
403
+ treat_dots_as_module: bool = False,
404
+ use_exact_imports: bool = False,
395
405
  ) -> None:
396
406
  self.data_type_manager: DataTypeManager = data_type_manager_type(
397
407
  python_version=target_python_version,
@@ -405,7 +415,8 @@ class Parser(ABC):
405
415
  self.data_model_root_type: Type[DataModel] = data_model_root_type
406
416
  self.data_model_field_type: Type[DataModelFieldBase] = data_model_field_type
407
417
 
408
- self.imports: Imports = Imports()
418
+ self.imports: Imports = Imports(use_exact_imports)
419
+ self.use_exact_imports: bool = use_exact_imports
409
420
  self._append_additional_imports(additional_imports=additional_imports)
410
421
 
411
422
  self.base_class: Optional[str] = base_class
@@ -514,6 +525,7 @@ class Parser(ABC):
514
525
  self.known_third_party = known_third_party
515
526
  self.custom_formatter = custom_formatters
516
527
  self.custom_formatters_kwargs = custom_formatters_kwargs
528
+ self.treat_dots_as_module = treat_dots_as_module
517
529
 
518
530
  @property
519
531
  def iter_source(self) -> Iterator[Source]:
@@ -544,6 +556,8 @@ class Parser(ABC):
544
556
  additional_imports = []
545
557
 
546
558
  for additional_import_string in additional_imports:
559
+ if additional_import_string is None:
560
+ continue
547
561
  new_import = Import.from_full_path(additional_import_string)
548
562
  self.imports.append(new_import)
549
563
 
@@ -652,7 +666,7 @@ class Parser(ABC):
652
666
  for model in models:
653
667
  class_name: str = model.class_name
654
668
  generated_name: str = scoped_model_resolver.add(
655
- model.path, class_name, unique=True, class_name=True
669
+ [model.path], class_name, unique=True, class_name=True
656
670
  ).name
657
671
  if class_name != generated_name:
658
672
  model.class_name = generated_name
@@ -666,16 +680,15 @@ class Parser(ABC):
666
680
  model.class_name = duplicate_name
667
681
  model_names[duplicate_name] = model
668
682
 
669
- @classmethod
670
683
  def __change_from_import(
671
- cls,
684
+ self,
672
685
  models: List[DataModel],
673
686
  imports: Imports,
674
687
  scoped_model_resolver: ModelResolver,
675
688
  init: bool,
676
689
  ) -> None:
677
690
  for model in models:
678
- scoped_model_resolver.add(model.path, model.class_name)
691
+ scoped_model_resolver.add([model.path], model.class_name)
679
692
  for model in models:
680
693
  before_import = model.imports
681
694
  imports.append(before_import)
@@ -700,7 +713,18 @@ class Parser(ABC):
700
713
  from_, import_ = full_path = relative(
701
714
  model.module_name, data_type.full_name
702
715
  )
716
+ if imports.use_exact: # pragma: no cover
717
+ from_, import_ = exact_import(
718
+ from_, import_, data_type.reference.short_name
719
+ )
703
720
  import_ = import_.replace('-', '_')
721
+ if (
722
+ len(model.module_path) > 1
723
+ and model.module_path[-1].count('.') > 0
724
+ and not self.treat_dots_as_module
725
+ ):
726
+ rel_path_depth = model.module_path[-1].count('.')
727
+ from_ = from_[rel_path_depth:]
704
728
 
705
729
  alias = scoped_model_resolver.add(full_path, import_).name
706
730
 
@@ -833,11 +857,16 @@ class Parser(ABC):
833
857
  required=True,
834
858
  )
835
859
  )
836
- imports.append(
860
+ literal = (
837
861
  IMPORT_LITERAL
838
862
  if self.target_python_version.has_literal_type
839
863
  else IMPORT_LITERAL_BACKPORT
840
864
  )
865
+ has_imported_literal = any(
866
+ literal == import_ for import_ in imports
867
+ )
868
+ if has_imported_literal: # pragma: no cover
869
+ imports.append(literal)
841
870
 
842
871
  @classmethod
843
872
  def _create_set_from_list(cls, data_type: DataType) -> Optional[DataType]:
@@ -931,7 +960,11 @@ class Parser(ABC):
931
960
  models.remove(duplicate)
932
961
 
933
962
  def __collapse_root_models(
934
- self, models: List[DataModel], unused_models: List[DataModel], imports: Imports
963
+ self,
964
+ models: List[DataModel],
965
+ unused_models: List[DataModel],
966
+ imports: Imports,
967
+ scoped_model_resolver: ModelResolver,
935
968
  ) -> None:
936
969
  if not self.collapse_root_models:
937
970
  return None
@@ -959,7 +992,7 @@ class Parser(ABC):
959
992
  if d.is_dict or d.is_union
960
993
  )
961
994
  ):
962
- continue
995
+ continue # pragma: no cover
963
996
 
964
997
  # set copied data_type
965
998
  copied_data_type = root_type_field.data_type.copy()
@@ -985,12 +1018,15 @@ class Parser(ABC):
985
1018
  root_type_field.constraints, model_field.constraints
986
1019
  )
987
1020
  if isinstance(
988
- root_type_field, pydantic_model.DataModelField
989
- ) and not model_field.extras.get('discriminator'): # no: pragma
1021
+ root_type_field,
1022
+ pydantic_model.DataModelField,
1023
+ ) and not model_field.extras.get('discriminator'):
990
1024
  discriminator = root_type_field.extras.get('discriminator')
991
- if discriminator: # no: pragma
1025
+ if discriminator:
992
1026
  model_field.extras['discriminator'] = discriminator
993
- data_type.parent.data_types.remove(data_type)
1027
+ data_type.parent.data_types.remove(
1028
+ data_type
1029
+ ) # pragma: no cover
994
1030
  data_type.parent.data_types.append(copied_data_type)
995
1031
 
996
1032
  elif isinstance(data_type.parent, DataType):
@@ -1003,6 +1039,31 @@ class Parser(ABC):
1003
1039
  ]
1004
1040
  else: # pragma: no cover
1005
1041
  continue
1042
+
1043
+ for d in root_type_field.data_type.data_types:
1044
+ if d.reference is None:
1045
+ continue
1046
+ from_, import_ = full_path = relative(
1047
+ model.module_name, d.full_name
1048
+ )
1049
+ if from_ and import_:
1050
+ alias = scoped_model_resolver.add(full_path, import_)
1051
+ d.alias = (
1052
+ alias.name
1053
+ if d.reference.short_name == import_
1054
+ else f'{alias.name}.{d.reference.short_name}'
1055
+ )
1056
+ imports.append(
1057
+ [
1058
+ Import(
1059
+ from_=from_,
1060
+ import_=import_,
1061
+ alias=alias.name,
1062
+ reference_path=d.reference.path,
1063
+ )
1064
+ ]
1065
+ )
1066
+
1006
1067
  original_field = get_most_of_parent(data_type, DataModelFieldBase)
1007
1068
  if original_field: # pragma: no cover
1008
1069
  # TODO: Improve detection of reference type
@@ -1147,6 +1208,32 @@ class Parser(ABC):
1147
1208
  if model_field.nullable is not True: # pragma: no cover
1148
1209
  model_field.nullable = False
1149
1210
 
1211
+ @classmethod
1212
+ def __postprocess_result_modules(cls, results):
1213
+ def process(input_tuple) -> Tuple[str, ...]:
1214
+ r = []
1215
+ for item in input_tuple:
1216
+ p = item.split('.')
1217
+ if len(p) > 1:
1218
+ r.extend(p[:-1])
1219
+ r.append(p[-1])
1220
+ else:
1221
+ r.append(item)
1222
+
1223
+ r = r[:-2] + [f'{r[-2]}.{r[-1]}']
1224
+ return tuple(r)
1225
+
1226
+ results = {process(k): v for k, v in results.items()}
1227
+
1228
+ init_result = [v for k, v in results.items() if k[-1] == '__init__.py'][0]
1229
+ folders = {t[:-1] if t[-1].endswith('.py') else t for t in results.keys()}
1230
+ for folder in folders:
1231
+ for i in range(len(folder)):
1232
+ subfolder = folder[: i + 1]
1233
+ init_file = subfolder + ('__init__.py',)
1234
+ results.update({init_file: init_result})
1235
+ return results
1236
+
1150
1237
  def __change_imported_model_name(
1151
1238
  self,
1152
1239
  models: List[DataModel],
@@ -1205,9 +1292,12 @@ class Parser(ABC):
1205
1292
  def module_key(data_model: DataModel) -> Tuple[str, ...]:
1206
1293
  return tuple(data_model.module_path)
1207
1294
 
1295
+ def sort_key(data_model: DataModel) -> Tuple[int, Tuple[str, ...]]:
1296
+ return (len(data_model.module_path), tuple(data_model.module_path))
1297
+
1208
1298
  # process in reverse order to correctly establish module levels
1209
1299
  grouped_models = groupby(
1210
- sorted(sorted_data_models.values(), key=module_key, reverse=True),
1300
+ sorted(sorted_data_models.values(), key=sort_key, reverse=True),
1211
1301
  key=module_key,
1212
1302
  )
1213
1303
 
@@ -1250,7 +1340,7 @@ class Parser(ABC):
1250
1340
  processed_models: List[Processed] = []
1251
1341
 
1252
1342
  for module, models in module_models:
1253
- imports = module_to_import[module] = Imports()
1343
+ imports = module_to_import[module] = Imports(self.use_exact_imports)
1254
1344
  init = False
1255
1345
  if module:
1256
1346
  parent = (*module[:-1], '__init__.py')
@@ -1273,7 +1363,9 @@ class Parser(ABC):
1273
1363
  self.__extract_inherited_enum(models)
1274
1364
  self.__set_reference_default_value_to_field(models)
1275
1365
  self.__reuse_model(models, require_update_action_models)
1276
- self.__collapse_root_models(models, unused_models, imports)
1366
+ self.__collapse_root_models(
1367
+ models, unused_models, imports, scoped_model_resolver
1368
+ )
1277
1369
  self.__set_default_enum_member(models)
1278
1370
  self.__sort_models(models, imports)
1279
1371
  self.__set_one_literal_on_default(models)
@@ -1283,6 +1375,10 @@ class Parser(ABC):
1283
1375
  Processed(module, models, init, imports, scoped_model_resolver)
1284
1376
  )
1285
1377
 
1378
+ for processed_model in processed_models:
1379
+ for model in processed_model.models:
1380
+ processed_model.imports.append(model.imports)
1381
+
1286
1382
  for unused_model in unused_models:
1287
1383
  module, models = model_to_module_models[unused_model]
1288
1384
  if unused_model in models: # pragma: no cover
@@ -1290,28 +1386,42 @@ class Parser(ABC):
1290
1386
  imports.remove(unused_model.imports)
1291
1387
  models.remove(unused_model)
1292
1388
 
1389
+ for processed_model in processed_models:
1390
+ # postprocess imports to remove unused imports.
1391
+ model_code = str('\n'.join([str(m) for m in processed_model.models]))
1392
+ unused_imports = [
1393
+ (from_, import_)
1394
+ for from_, imports_ in processed_model.imports.items()
1395
+ for import_ in imports_
1396
+ if import_ not in model_code
1397
+ ]
1398
+ for from_, import_ in unused_imports:
1399
+ processed_model.imports.remove(Import(from_=from_, import_=import_))
1400
+
1293
1401
  for module, models, init, imports, scoped_model_resolver in processed_models:
1294
1402
  # process after removing unused models
1295
1403
  self.__change_imported_model_name(models, imports, scoped_model_resolver)
1296
1404
 
1297
1405
  for module, models, init, imports, scoped_model_resolver in processed_models:
1298
1406
  result: List[str] = []
1299
- if with_import:
1300
- result += [str(self.imports), str(imports), '\n']
1301
-
1302
- code = dump_templates(models)
1303
- result += [code]
1304
-
1305
- if self.dump_resolve_reference_action is not None:
1306
- result += [
1307
- '\n',
1308
- self.dump_resolve_reference_action(
1309
- m.reference.short_name
1310
- for m in models
1311
- if m.path in require_update_action_models
1312
- ),
1313
- ]
1314
-
1407
+ if models:
1408
+ if with_import:
1409
+ result += [str(self.imports), str(imports), '\n']
1410
+
1411
+ code = dump_templates(models)
1412
+ result += [code]
1413
+
1414
+ if self.dump_resolve_reference_action is not None:
1415
+ result += [
1416
+ '\n',
1417
+ self.dump_resolve_reference_action(
1418
+ m.reference.short_name
1419
+ for m in models
1420
+ if m.path in require_update_action_models
1421
+ ),
1422
+ ]
1423
+ if not result and not init:
1424
+ continue
1315
1425
  body = '\n'.join(result)
1316
1426
  if code_formatter:
1317
1427
  body = code_formatter.format_code(body)
@@ -1324,4 +1434,20 @@ class Parser(ABC):
1324
1434
  if [*results] == [('__init__.py',)]:
1325
1435
  return results[('__init__.py',)].body
1326
1436
 
1437
+ results = {tuple(i.replace('-', '_') for i in k): v for k, v in results.items()}
1438
+ results = (
1439
+ self.__postprocess_result_modules(results)
1440
+ if self.treat_dots_as_module
1441
+ else {
1442
+ tuple(
1443
+ (
1444
+ part[: part.rfind('.')].replace('.', '_')
1445
+ + part[part.rfind('.') :]
1446
+ )
1447
+ for part in k
1448
+ ): v
1449
+ for k, v in results.items()
1450
+ }
1451
+ )
1452
+
1327
1453
  return results
@@ -37,11 +37,7 @@ from datamodel_code_generator.parser.base import (
37
37
  escape_characters,
38
38
  )
39
39
  from datamodel_code_generator.reference import ModelType, Reference
40
- from datamodel_code_generator.types import (
41
- DataTypeManager,
42
- StrictTypes,
43
- Types,
44
- )
40
+ from datamodel_code_generator.types import DataTypeManager, StrictTypes, Types
45
41
 
46
42
  try:
47
43
  import graphql
@@ -158,6 +154,8 @@ class GraphQLParser(Parser):
158
154
  custom_formatters_kwargs: Optional[Dict[str, Any]] = None,
159
155
  use_pendulum: bool = False,
160
156
  http_query_parameters: Optional[Sequence[Tuple[str, str]]] = None,
157
+ treat_dots_as_module: bool = False,
158
+ use_exact_imports: bool = False,
161
159
  ) -> None:
162
160
  super().__init__(
163
161
  source=source,
@@ -225,18 +223,22 @@ class GraphQLParser(Parser):
225
223
  custom_formatters_kwargs=custom_formatters_kwargs,
226
224
  use_pendulum=use_pendulum,
227
225
  http_query_parameters=http_query_parameters,
226
+ treat_dots_as_module=treat_dots_as_module,
227
+ use_exact_imports=use_exact_imports,
228
228
  )
229
229
 
230
230
  self.data_model_scalar_type = data_model_scalar_type
231
231
  self.data_model_union_type = data_model_union_type
232
+ self.use_standard_collections = use_standard_collections
233
+ self.use_union_operator = use_union_operator
232
234
 
233
235
  def _get_context_source_path_parts(self) -> Iterator[Tuple[Source, List[str]]]:
234
236
  # TODO (denisart): Temporarily this method duplicates
235
237
  # the method `datamodel_code_generator.parser.jsonschema.JsonSchemaParser._get_context_source_path_parts`.
236
238
 
237
- if isinstance(self.source, list) or (
239
+ if isinstance(self.source, list) or ( # pragma: no cover
238
240
  isinstance(self.source, Path) and self.source.is_dir()
239
- ):
241
+ ): # pragma: no cover
240
242
  self.current_source_path = Path()
241
243
  self.model_resolver.after_load_files = {
242
244
  self.base_path.joinpath(s.path).resolve().as_posix()
@@ -244,11 +246,11 @@ class GraphQLParser(Parser):
244
246
  }
245
247
 
246
248
  for source in self.iter_source:
247
- if isinstance(self.source, ParseResult):
249
+ if isinstance(self.source, ParseResult): # pragma: no cover
248
250
  path_parts = self.get_url_path_parts(self.source)
249
251
  else:
250
252
  path_parts = list(source.path.parts)
251
- if self.current_source_path is not None:
253
+ if self.current_source_path is not None: # pragma: no cover
252
254
  self.current_source_path = source.path
253
255
  with self.model_resolver.current_base_path_context(
254
256
  source.path.parent
@@ -265,7 +267,7 @@ class GraphQLParser(Parser):
265
267
 
266
268
  resolved_type = graphql_resolver.kind(type_, None)
267
269
 
268
- if resolved_type in self.support_graphql_types:
270
+ if resolved_type in self.support_graphql_types: # pragma: no cover
269
271
  self.all_graphql_objects[type_.name] = type_
270
272
  # TODO: need a special method for each graph type
271
273
  self.references[type_.name] = Reference(
@@ -279,8 +281,13 @@ class GraphQLParser(Parser):
279
281
  def _typename_field(self, name: str) -> DataModelFieldBase:
280
282
  return self.data_model_field_type(
281
283
  name='typename__',
282
- data_type=DataType(literals=[name]),
284
+ data_type=DataType(
285
+ literals=[name],
286
+ use_union_operator=self.use_union_operator,
287
+ use_standard_collections=self.use_standard_collections,
288
+ ),
283
289
  default=name,
290
+ use_annotated=self.use_annotated,
284
291
  required=False,
285
292
  alias='__typename',
286
293
  use_one_literal_as_default=True,
@@ -344,7 +351,11 @@ class GraphQLParser(Parser):
344
351
  alias: str,
345
352
  field: Union[graphql.GraphQLField, graphql.GraphQLInputField],
346
353
  ) -> DataModelFieldBase:
347
- final_data_type = DataType(is_optional=True)
354
+ final_data_type = DataType(
355
+ is_optional=True,
356
+ use_union_operator=self.use_union_operator,
357
+ use_standard_collections=self.use_standard_collections,
358
+ )
348
359
  data_type = final_data_type
349
360
  obj = field.type
350
361
 
@@ -352,11 +363,15 @@ class GraphQLParser(Parser):
352
363
  if graphql.is_list_type(obj):
353
364
  data_type.is_list = True
354
365
 
355
- new_data_type = DataType(is_optional=True)
366
+ new_data_type = DataType(
367
+ is_optional=True,
368
+ use_union_operator=self.use_union_operator,
369
+ use_standard_collections=self.use_standard_collections,
370
+ )
356
371
  data_type.data_types = [new_data_type]
357
372
 
358
373
  data_type = new_data_type
359
- elif graphql.is_non_null_type(obj):
374
+ elif graphql.is_non_null_type(obj): # pragma: no cover
360
375
  data_type.is_optional = False
361
376
 
362
377
  obj = obj.of_type
@@ -368,10 +383,10 @@ class GraphQLParser(Parser):
368
383
  )
369
384
  extras = {}
370
385
 
371
- if hasattr(field, 'default_value'):
372
- if field.default_value == graphql.pyutils.Undefined:
386
+ if hasattr(field, 'default_value'): # pragma: no cover
387
+ if field.default_value == graphql.pyutils.Undefined: # pragma: no cover
373
388
  default = None
374
- else:
389
+ else: # pragma: no cover
375
390
  default = field.default_value
376
391
  else:
377
392
  if required is False:
@@ -421,7 +436,7 @@ class GraphQLParser(Parser):
421
436
  fields.append(self._typename_field(obj.name))
422
437
 
423
438
  base_classes = []
424
- if hasattr(obj, 'interfaces'):
439
+ if hasattr(obj, 'interfaces'): # pragma: no cover
425
440
  base_classes = [self.references[i.name] for i in obj.interfaces]
426
441
 
427
442
  data_model_type = self.data_model_type(
@@ -447,7 +462,7 @@ class GraphQLParser(Parser):
447
462
  def parse_input_object(
448
463
  self, input_graphql_object: graphql.GraphQLInputObjectType
449
464
  ) -> None:
450
- self.parse_object_like(input_graphql_object)
465
+ self.parse_object_like(input_graphql_object) # pragma: no cover
451
466
 
452
467
  def parse_union(self, union_object: graphql.GraphQLUnionType) -> None:
453
468
  fields = []
@@ -440,6 +440,8 @@ class JsonSchemaParser(Parser):
440
440
  custom_formatters_kwargs: Optional[Dict[str, Any]] = None,
441
441
  use_pendulum: bool = False,
442
442
  http_query_parameters: Optional[Sequence[Tuple[str, str]]] = None,
443
+ treat_dots_as_module: bool = False,
444
+ use_exact_imports: bool = False,
443
445
  ) -> None:
444
446
  super().__init__(
445
447
  source=source,
@@ -507,6 +509,8 @@ class JsonSchemaParser(Parser):
507
509
  custom_formatters_kwargs=custom_formatters_kwargs,
508
510
  use_pendulum=use_pendulum,
509
511
  http_query_parameters=http_query_parameters,
512
+ treat_dots_as_module=treat_dots_as_module,
513
+ use_exact_imports=use_exact_imports,
510
514
  )
511
515
 
512
516
  self.remote_object_cache: DefaultPutDict[str, Dict[str, Any]] = DefaultPutDict()
@@ -758,10 +762,10 @@ class JsonSchemaParser(Parser):
758
762
  return self.data_type(reference=base_classes[0])
759
763
  if required:
760
764
  for field in fields:
761
- if self.force_optional_for_required_fields or (
765
+ if self.force_optional_for_required_fields or ( # pragma: no cover
762
766
  self.apply_default_values_for_required_fields and field.has_default
763
767
  ):
764
- continue
768
+ continue # pragma: no cover
765
769
  if (field.original_name or field.name) in required:
766
770
  field.required = True
767
771
  if obj.required:
@@ -1310,11 +1314,11 @@ class JsonSchemaParser(Parser):
1310
1314
  elif obj.custom_type_path:
1311
1315
  data_type = self.data_type_manager.get_data_type_from_full_path(
1312
1316
  obj.custom_type_path, is_custom_type=True
1313
- )
1317
+ ) # pragma: no cover
1314
1318
  elif obj.is_array:
1315
1319
  data_type = self.parse_array_fields(
1316
1320
  name, obj, get_special_path('array', path)
1317
- ).data_type
1321
+ ).data_type # pragma: no cover
1318
1322
  elif obj.anyOf or obj.oneOf:
1319
1323
  reference = self.model_resolver.add(
1320
1324
  path, name, loaded=True, class_name=True
@@ -1328,9 +1332,9 @@ class JsonSchemaParser(Parser):
1328
1332
  name, obj, get_special_path('oneOf', path)
1329
1333
  )
1330
1334
 
1331
- if len(data_types) > 1:
1335
+ if len(data_types) > 1: # pragma: no cover
1332
1336
  data_type = self.data_type(data_types=data_types)
1333
- elif not data_types:
1337
+ elif not data_types: # pragma: no cover
1334
1338
  return EmptyDataType()
1335
1339
  else: # pragma: no cover
1336
1340
  data_type = data_types[0]
@@ -1659,7 +1663,7 @@ class JsonSchemaParser(Parser):
1659
1663
  elif obj.oneOf or obj.anyOf:
1660
1664
  data_type = self.parse_root_type(name, obj, path)
1661
1665
  if isinstance(data_type, EmptyDataType) and obj.properties:
1662
- self.parse_object(name, obj, path)
1666
+ self.parse_object(name, obj, path) # pragma: no cover
1663
1667
  elif obj.properties:
1664
1668
  self.parse_object(name, obj, path)
1665
1669
  elif obj.patternProperties:
@@ -222,6 +222,8 @@ class OpenAPIParser(JsonSchemaParser):
222
222
  custom_formatters_kwargs: Optional[Dict[str, Any]] = None,
223
223
  use_pendulum: bool = False,
224
224
  http_query_parameters: Optional[Sequence[Tuple[str, str]]] = None,
225
+ treat_dots_as_module: bool = False,
226
+ use_exact_imports: bool = False,
225
227
  ):
226
228
  super().__init__(
227
229
  source=source,
@@ -289,6 +291,8 @@ class OpenAPIParser(JsonSchemaParser):
289
291
  custom_formatters_kwargs=custom_formatters_kwargs,
290
292
  use_pendulum=use_pendulum,
291
293
  http_query_parameters=http_query_parameters,
294
+ treat_dots_as_module=treat_dots_as_module,
295
+ use_exact_imports=use_exact_imports,
292
296
  )
293
297
  self.open_api_scopes: List[OpenAPIScope] = openapi_scopes or [
294
298
  OpenAPIScope.Schemas
@@ -0,0 +1,21 @@
1
+ import sys
2
+
3
+ import pydantic.typing
4
+
5
+
6
+ def patched_evaluate_forwardref(
7
+ forward_ref, globalns, localns=None
8
+ ): # pragma: no cover
9
+ try:
10
+ return forward_ref._evaluate(
11
+ globalns, localns or None, set()
12
+ ) # pragma: no cover
13
+ except TypeError:
14
+ # Fallback for Python 3.12 compatibility
15
+ return forward_ref._evaluate(
16
+ globalns, localns or None, set(), recursive_guard=set()
17
+ )
18
+
19
+
20
+ if '3.12' in sys.version: # pragma: no cover
21
+ pydantic.typing.evaluate_forwardref = patched_evaluate_forwardref
@@ -25,12 +25,7 @@ from typing import (
25
25
 
26
26
  import pydantic
27
27
  from packaging import version
28
- from pydantic import (
29
- StrictBool,
30
- StrictInt,
31
- StrictStr,
32
- create_model,
33
- )
28
+ from pydantic import StrictBool, StrictInt, StrictStr, create_model
34
29
 
35
30
  from datamodel_code_generator.format import PythonVersion
36
31
  from datamodel_code_generator.imports import (
@@ -1 +1 @@
1
- version: str = '0.25.7'
1
+ version: str = '0.25.9'
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: datamodel-code-generator
3
- Version: 0.25.7
3
+ Version: 0.25.9
4
4
  Summary: Datamodel Code Generator
5
5
  Home-page: https://github.com/koxudaxi/datamodel-code-generator
6
6
  License: MIT
@@ -61,9 +61,6 @@ and [msgspec.Struct](https://github.com/jcrist/msgspec) from an openapi file and
61
61
  ## Help
62
62
  See [documentation](https://koxudaxi.github.io/datamodel-code-generator) for more details.
63
63
 
64
- ## Sponsors
65
- [![JetBrains](https://avatars.githubusercontent.com/u/60931315?s=200&v=4)](https://github.com/JetBrainsOfficial)
66
-
67
64
  ## Quick Installation
68
65
 
69
66
  To install `datamodel-code-generator`:
@@ -281,13 +278,52 @@ class Apis(BaseModel):
281
278
  ```
282
279
  </details>
283
280
 
281
+ ## Supported input types
282
+ - OpenAPI 3 (YAML/JSON, [OpenAPI Data Type](https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.2.md#data-types));
283
+ - JSON Schema ([JSON Schema Core](http://json-schema.org/draft/2019-09/json-schema-validation.html)/[JSON Schema Validation](http://json-schema.org/draft/2019-09/json-schema-validation.html));
284
+ - JSON/YAML/CSV Data (it will be converted to JSON Schema);
285
+ - Python dictionary (it will be converted to JSON Schema);
286
+ - GraphQL schema ([GraphQL Schemas and Types](https://graphql.org/learn/schema/));
287
+
288
+ ## Supported output types
289
+ - [pydantic](https://docs.pydantic.dev/1.10/).BaseModel;
290
+ - [pydantic_v2](https://docs.pydantic.dev/2.0/).BaseModel;
291
+ - [dataclasses.dataclass](https://docs.python.org/3/library/dataclasses.html);
292
+ - [typing.TypedDict](https://docs.python.org/3/library/typing.html#typing.TypedDict);
293
+ - [msgspec.Struct](https://github.com/jcrist/msgspec);
294
+ - Custom type from your [jinja2](https://jinja.palletsprojects.com/en/3.1.x/) template;
295
+
296
+ ## Sponsors
297
+ <table>
298
+ <tr>
299
+ <td valign="top" align="center">
300
+ <a href="https://github.com/JetBrainsOfficial">
301
+ <img src="https://avatars.githubusercontent.com/u/60931315?s=100&v=4" alt="JetBrains Logo" style="width: 100px;">
302
+ <p>JetBrains</p>
303
+ </a>
304
+ </td>
305
+ <td valign="top" align="center">
306
+ <a href="https://github.com/astral-sh">
307
+ <img src="https://avatars.githubusercontent.com/u/115962839?s=200&v=4" alt="Astral Logo" style="width: 100px;">
308
+ <p>Astral</p>
309
+ </a>
310
+ </td>
311
+ <td valign="top" align="center">
312
+ <a href="https://github.com/DataDog">
313
+ <img src="https://avatars.githubusercontent.com/u/365230?s=200&v=4" alt="Datadog, Inc. Logo" style="width: 100px;">
314
+ <p>Datadog, Inc.</p>
315
+ </a>
316
+ </td>
317
+ </tr>
318
+ </table>
319
+
284
320
  ## Projects that use datamodel-code-generator
285
321
 
286
322
  These OSS projects use datamodel-code-generator to generate many models.
287
323
  See the following linked projects for real world examples and inspiration.
288
324
 
289
325
  - [airbytehq/airbyte](https://github.com/airbytehq/airbyte)
290
- - *[code-generator/Dockerfile](https://github.com/airbytehq/airbyte/blob/master/tools/code-generator/Dockerfile)*
326
+ - *[Generate Python, Java/Kotlin, and Typescript protocol models](https://github.com/airbytehq/airbyte-protocol/tree/main/protocol-models/bin)*
291
327
  - [apache/iceberg](https://github.com/apache/iceberg)
292
328
  - *[Generate Python code](https://github.com/apache/iceberg/blob/d2e1094ee0cc6239d43f63ba5114272f59d605d2/open-api/README.md?plain=1#L39)*
293
329
  *[`make generate`](https://github.com/apache/iceberg/blob/d2e1094ee0cc6239d43f63ba5114272f59d605d2/open-api/Makefile#L24-L34)*
@@ -312,21 +348,6 @@ See the following linked projects for real world examples and inspiration.
312
348
  - [SeldonIO/MLServer](https://github.com/SeldonIO/MLServer)
313
349
  - *[generate-types.sh](https://github.com/SeldonIO/MLServer/blob/master/hack/generate-types.sh)*
314
350
 
315
- ## Supported input types
316
- - OpenAPI 3 (YAML/JSON, [OpenAPI Data Type](https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.2.md#data-types));
317
- - JSON Schema ([JSON Schema Core](http://json-schema.org/draft/2019-09/json-schema-validation.html)/[JSON Schema Validation](http://json-schema.org/draft/2019-09/json-schema-validation.html));
318
- - JSON/YAML/CSV Data (it will be converted to JSON Schema);
319
- - Python dictionary (it will be converted to JSON Schema);
320
- - GraphQL schema ([GraphQL Schemas and Types](https://graphql.org/learn/schema/));
321
-
322
- ## Supported output types
323
- - [pydantic](https://docs.pydantic.dev/1.10/).BaseModel;
324
- - [pydantic_v2](https://docs.pydantic.dev/2.0/).BaseModel;
325
- - [dataclasses.dataclass](https://docs.python.org/3/library/dataclasses.html);
326
- - [typing.TypedDict](https://docs.python.org/3/library/typing.html#typing.TypedDict);
327
- - [msgspec.Struct](https://github.com/jcrist/msgspec);
328
- - Custom type from your [jinja2](https://jinja.palletsprojects.com/en/3.1.x/) template;
329
-
330
351
  ## Installation
331
352
 
332
353
  To install `datamodel-code-generator`:
@@ -364,6 +385,7 @@ This method needs the [http extra option](#http-extra-option)
364
385
  ## All Command Options
365
386
 
366
387
  The `datamodel-codegen` command:
388
+
367
389
  ```bash
368
390
  usage:
369
391
  datamodel-codegen [options]
@@ -374,7 +396,7 @@ Options:
374
396
  --http-headers HTTP_HEADER [HTTP_HEADER ...]
375
397
  Set headers in HTTP requests to the remote host.
376
398
  (example: "Authorization: Basic dXNlcjpwYXNz")
377
- --http-ignore-tls Disable verification of the remote host's TLS
399
+ --http-ignore-tls Disable verification of the remote host''s TLS
378
400
  certificate
379
401
  --http-query-parameters QUERY_PARAMETER [QUERY_PARAMETER ...]
380
402
  Set query parameters in HTTP requests to the remote host.
@@ -442,7 +464,7 @@ Field customization:
442
464
  e.g. underscores
443
465
  --snake-case-field Change camel-case field name to snake-case
444
466
  --special-field-name-prefix SPECIAL_FIELD_NAME_PREFIX
445
- Set field name prefix when first character can't be
467
+ Set field name prefix when first character can''t be
446
468
  used as Python field name (default: `field`)
447
469
  --strip-default-none Strip default None on fields
448
470
  --use-default Use default value even if a field is required
@@ -463,7 +485,7 @@ Model customization:
463
485
  Set class name of root model
464
486
  --collapse-root-models
465
487
  Models generated with a root-type field will be
466
- mergedinto the models using that root-type model
488
+ merged into the models using that root-type model
467
489
  --disable-appending-item-suffix
468
490
  Disable appending `Item` suffix to model name in an
469
491
  array
@@ -472,7 +494,7 @@ Model customization:
472
494
  Enable faux immutability
473
495
  --enable-version-header
474
496
  Enable package version on file headers
475
- --keep-model-order Keep generated models' order
497
+ --keep-model-order Keep generated models'' order
476
498
  --reuse-model Reuse models on the field when a module has the model
477
499
  with the same content
478
500
  --target-python-version {3.6,3.7,3.8,3.9,3.10,3.11}
@@ -480,6 +502,9 @@ Model customization:
480
502
  --use-schema-description
481
503
  Use schema description to populate class docstring
482
504
  --use-title-as-name use titles as class names of models
505
+ --use-exact-imports Import exact types instead of modules, for example:
506
+ `from .foo import Bar` instead of
507
+ `from . import foo` with `foo.Bar`
483
508
 
484
509
  Template customization:
485
510
  --aliases ALIASES Alias mapping file
@@ -1,11 +1,11 @@
1
- datamodel_code_generator/__init__.py,sha256=zqe2mCNMAgFm9BGJHz8yTAVYVdK30Gt_pOnN7CjKvx4,17951
2
- datamodel_code_generator/__main__.py,sha256=klyeTja8UY57kcMvWXSp6aEOtNAOCX_biB0sqsCLFfE,19483
3
- datamodel_code_generator/arguments.py,sha256=kjTIK3mqF7fU5hJ0ZA-3axudgKjpkypLGsc4HikHqFQ,14941
1
+ datamodel_code_generator/__init__.py,sha256=nCulaRL1TE7CM_1ugD6QoijWwRStbYDp5_ncwv0UYEQ,18185
2
+ datamodel_code_generator/__main__.py,sha256=qk-vOZKHdNGWO58xPH8jG4arkmKT0wIPCIRjDwbguRg,19595
3
+ datamodel_code_generator/arguments.py,sha256=uLSXdRCfa5To1OW69RblOS4UWYzFqjGX9PQgOWEnc5c,15335
4
4
  datamodel_code_generator/format.py,sha256=yTSOf3-jUO4X5NOljAjm-4xYC_uBJV3-RLXUYIvw-yw,8595
5
5
  datamodel_code_generator/http.py,sha256=CwLVnXO4_W_fWKJsHnJp6Q_3GuF3qjCjeAe48Ihawrs,714
6
- datamodel_code_generator/imports.py,sha256=zswV5jMxWwr1-UO0beiyHT9ykhTYGnMTLtTQ6YVougQ,5529
6
+ datamodel_code_generator/imports.py,sha256=EUUgdLMD_724Jhp50-hQy2C6uMSQISGbJKhUfH3iIbo,5595
7
7
  datamodel_code_generator/model/__init__.py,sha256=A0CqnL87-lY_Te-n-99ya5v7h6l4jE6hOPP_itvcWOc,3091
8
- datamodel_code_generator/model/base.py,sha256=M1T8L7how4nW0aqGuunzc1LyWVSJ3OuFvRnx1Ig13hA,14019
8
+ datamodel_code_generator/model/base.py,sha256=3a7gTAI3e_2WMdgHyhKTWqs1qELqVb18HuwcBof-2C4,14059
9
9
  datamodel_code_generator/model/dataclass.py,sha256=9meJNWb-XPYYO8kDMSLhSfO6fulGWV7r3_XIM5kA71M,3965
10
10
  datamodel_code_generator/model/enum.py,sha256=Giehhtij2DZs2LssSJnG_CIIHsSA7Mkz471GU-Cb5kI,3338
11
11
  datamodel_code_generator/model/imports.py,sha256=9-0bd-DvZRjZkWMsmw-gykL8fzTd6M-vQEqS7Rm_ty4,725
@@ -17,7 +17,7 @@ datamodel_code_generator/model/pydantic/dataclass.py,sha256=sbqTmutl8Fjf1pYngfdv
17
17
  datamodel_code_generator/model/pydantic/imports.py,sha256=2nSLYwphBUMQEa0PTSNwoLjEBslu02EQb6BdZ-S51yk,2189
18
18
  datamodel_code_generator/model/pydantic/types.py,sha256=GDh1KRforpUIj58TSLSqrbKRnXi8O1qfHT8NaoUYvME,13034
19
19
  datamodel_code_generator/model/pydantic_v2/__init__.py,sha256=6IcvuP18MzTf3b6edPz6q1sIxzjuvmXbceQfPgkz25Q,947
20
- datamodel_code_generator/model/pydantic_v2/base_model.py,sha256=SURD7qLZkQaJOcFqmhcuNOCTb49WWcHPpo0RsjjD0SQ,8129
20
+ datamodel_code_generator/model/pydantic_v2/base_model.py,sha256=jaRPrnrc0UpcWeI7GjPjJomkh9Q9Ej6J9iEOyVq0IGM,8209
21
21
  datamodel_code_generator/model/pydantic_v2/imports.py,sha256=Q6XC6iE5v4LJvQ2DOXDGFtR-FnGPsaZ56KiiTiF2bIE,191
22
22
  datamodel_code_generator/model/pydantic_v2/root_model.py,sha256=ZlaNHfC8PuLJ1--OYm9AXbL1A95z1F421VoraP3mo70,951
23
23
  datamodel_code_generator/model/pydantic_v2/types.py,sha256=6KAJXO_SGs5bc8GQScCpowgqjaSmrn9W9iewpWIHack,1440
@@ -43,17 +43,18 @@ datamodel_code_generator/model/typed_dict.py,sha256=pzUaKLaVDF5rfxAaR1m4FqnhR00e
43
43
  datamodel_code_generator/model/types.py,sha256=Ti3cEtRQpBYgC4Y5ocAn6Ol-ZbnKG_P7C0nHBX9KtV8,2953
44
44
  datamodel_code_generator/model/union.py,sha256=loaVWQi-UHkV4gLfF2JhxLcgZRMsejaoJzGvjTlp_bo,1716
45
45
  datamodel_code_generator/parser/__init__.py,sha256=zHbw6RPlJC0SAQjb-XyVlyZhcOu5PfYgPidy6jlUM8M,793
46
- datamodel_code_generator/parser/base.py,sha256=y9lNLVYWu9SdBrLYgl2wGA-X9-0axtEp6E-1H-gUemQ,54375
47
- datamodel_code_generator/parser/graphql.py,sha256=n1AEOoJRbmK4Oq3_4M3VgvMaC12e3zBXFMXVrO-d2kI,20594
48
- datamodel_code_generator/parser/jsonschema.py,sha256=8bEBPFPwqQ4Qbxzjtfzh6E_KYBzH9zCHisg0IJOo2Bk,69658
49
- datamodel_code_generator/parser/openapi.py,sha256=bGLWGbFysBq4CtM-piuHero_EKW_T0-CS8miEj3Trqw,25623
46
+ datamodel_code_generator/parser/base.py,sha256=oFldzX2Qg3n7vEHzvd7QiJRuCgTg1zyyZKLySja1pac,59679
47
+ datamodel_code_generator/parser/graphql.py,sha256=0LF2wOqVMCKQlNKih8px3SN1iMzGgIf4d9TT58xOuAQ,21639
48
+ datamodel_code_generator/parser/jsonschema.py,sha256=RrPrZvegLvh2jFlXwT9zXA1MdUyWCsMMO8az5STB4UA,69987
49
+ datamodel_code_generator/parser/openapi.py,sha256=zCQr12pRE78PSRWhwE7a-2_IaqtC8y0ukMkipJAJYi8,25812
50
50
  datamodel_code_generator/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
51
+ datamodel_code_generator/pydantic_patch.py,sha256=XDVGIvMFrJKZmp5Zk74Id2YQeizIZ8B2WqCdUR_CB6U,560
51
52
  datamodel_code_generator/reference.py,sha256=EoalUNUP5us31bfMrHDa1iiKqxL8gHrpZKY_IxgZrOg,26347
52
- datamodel_code_generator/types.py,sha256=xPFX0NkgPz5CEz_yqKkZa7wPFr08P0e7X4Ooheoo2Uo,19652
53
+ datamodel_code_generator/types.py,sha256=Dadq241BMGcG3U23ZEsDTf3eUbuEr2K_8jxovKpWcGw,19631
53
54
  datamodel_code_generator/util.py,sha256=Lrjj20mmma4Glpfs42sCoGpt1cncalgUGyNZZviqWdU,3692
54
- datamodel_code_generator/version.py,sha256=ELV7HbvS0xRPP1KDziJGhzraG0NcEhduFjGDK9G43kM,24
55
- datamodel_code_generator-0.25.7.dist-info/LICENSE,sha256=K54Lwc6_jduycsy8oFFjQEeSSuEiqvVIjCGIXOMnuTQ,1068
56
- datamodel_code_generator-0.25.7.dist-info/METADATA,sha256=uR6VdHb6Kr1oLn7xxwZPzX6lk_kDEI3Zwo6xT2E4Puw,23339
57
- datamodel_code_generator-0.25.7.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
58
- datamodel_code_generator-0.25.7.dist-info/entry_points.txt,sha256=bykbUWqOCiKfxJPGe8jpNqTqD1NG7uyRmozdnwzu7rk,76
59
- datamodel_code_generator-0.25.7.dist-info/RECORD,,
55
+ datamodel_code_generator/version.py,sha256=HM5IyJUGiVaohscoua19eQW54vpGj8ZoKuKvZkXX8GY,24
56
+ datamodel_code_generator-0.25.9.dist-info/LICENSE,sha256=K54Lwc6_jduycsy8oFFjQEeSSuEiqvVIjCGIXOMnuTQ,1068
57
+ datamodel_code_generator-0.25.9.dist-info/METADATA,sha256=37duhcTb2YAa6u6tU5pMNWbUsl3BueJN-TRlpGLMs1M,24216
58
+ datamodel_code_generator-0.25.9.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
59
+ datamodel_code_generator-0.25.9.dist-info/entry_points.txt,sha256=bykbUWqOCiKfxJPGe8jpNqTqD1NG7uyRmozdnwzu7rk,76
60
+ datamodel_code_generator-0.25.9.dist-info/RECORD,,