datamodel-code-generator 0.25.5__tar.gz → 0.25.7__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of datamodel-code-generator might be problematic; see the registry's advisory page for more details.

Files changed (59)
  1. {datamodel_code_generator-0.25.5 → datamodel_code_generator-0.25.7}/PKG-INFO +7 -1
  2. {datamodel_code_generator-0.25.5 → datamodel_code_generator-0.25.7}/README.md +6 -0
  3. {datamodel_code_generator-0.25.5 → datamodel_code_generator-0.25.7}/datamodel_code_generator/__init__.py +7 -1
  4. {datamodel_code_generator-0.25.5 → datamodel_code_generator-0.25.7}/datamodel_code_generator/__main__.py +21 -0
  5. {datamodel_code_generator-0.25.5 → datamodel_code_generator-0.25.7}/datamodel_code_generator/arguments.py +12 -0
  6. {datamodel_code_generator-0.25.5 → datamodel_code_generator-0.25.7}/datamodel_code_generator/http.py +6 -1
  7. {datamodel_code_generator-0.25.5 → datamodel_code_generator-0.25.7}/datamodel_code_generator/imports.py +3 -0
  8. {datamodel_code_generator-0.25.5 → datamodel_code_generator-0.25.7}/datamodel_code_generator/model/msgspec.py +7 -2
  9. {datamodel_code_generator-0.25.5 → datamodel_code_generator-0.25.7}/datamodel_code_generator/model/pydantic/types.py +14 -2
  10. {datamodel_code_generator-0.25.5 → datamodel_code_generator-0.25.7}/datamodel_code_generator/model/pydantic_v2/__init__.py +2 -0
  11. {datamodel_code_generator-0.25.5 → datamodel_code_generator-0.25.7}/datamodel_code_generator/model/pydantic_v2/base_model.py +11 -0
  12. {datamodel_code_generator-0.25.5 → datamodel_code_generator-0.25.7}/datamodel_code_generator/model/template/msgspec.jinja2 +4 -2
  13. {datamodel_code_generator-0.25.5 → datamodel_code_generator-0.25.7}/datamodel_code_generator/model/template/pydantic_v2/RootModel.jinja2 +1 -1
  14. {datamodel_code_generator-0.25.5 → datamodel_code_generator-0.25.7}/datamodel_code_generator/model/types.py +2 -0
  15. {datamodel_code_generator-0.25.5 → datamodel_code_generator-0.25.7}/datamodel_code_generator/parser/base.py +34 -6
  16. {datamodel_code_generator-0.25.5 → datamodel_code_generator-0.25.7}/datamodel_code_generator/parser/graphql.py +4 -0
  17. {datamodel_code_generator-0.25.5 → datamodel_code_generator-0.25.7}/datamodel_code_generator/parser/jsonschema.py +8 -0
  18. {datamodel_code_generator-0.25.5 → datamodel_code_generator-0.25.7}/datamodel_code_generator/parser/openapi.py +4 -0
  19. {datamodel_code_generator-0.25.5 → datamodel_code_generator-0.25.7}/datamodel_code_generator/types.py +2 -0
  20. datamodel_code_generator-0.25.7/datamodel_code_generator/version.py +1 -0
  21. {datamodel_code_generator-0.25.5 → datamodel_code_generator-0.25.7}/pyproject.toml +2 -2
  22. datamodel_code_generator-0.25.5/datamodel_code_generator/version.py +0 -1
  23. {datamodel_code_generator-0.25.5 → datamodel_code_generator-0.25.7}/LICENSE +0 -0
  24. {datamodel_code_generator-0.25.5 → datamodel_code_generator-0.25.7}/datamodel_code_generator/format.py +0 -0
  25. {datamodel_code_generator-0.25.5 → datamodel_code_generator-0.25.7}/datamodel_code_generator/model/__init__.py +0 -0
  26. {datamodel_code_generator-0.25.5 → datamodel_code_generator-0.25.7}/datamodel_code_generator/model/base.py +0 -0
  27. {datamodel_code_generator-0.25.5 → datamodel_code_generator-0.25.7}/datamodel_code_generator/model/dataclass.py +0 -0
  28. {datamodel_code_generator-0.25.5 → datamodel_code_generator-0.25.7}/datamodel_code_generator/model/enum.py +0 -0
  29. {datamodel_code_generator-0.25.5 → datamodel_code_generator-0.25.7}/datamodel_code_generator/model/imports.py +0 -0
  30. {datamodel_code_generator-0.25.5 → datamodel_code_generator-0.25.7}/datamodel_code_generator/model/pydantic/__init__.py +0 -0
  31. {datamodel_code_generator-0.25.5 → datamodel_code_generator-0.25.7}/datamodel_code_generator/model/pydantic/base_model.py +0 -0
  32. {datamodel_code_generator-0.25.5 → datamodel_code_generator-0.25.7}/datamodel_code_generator/model/pydantic/custom_root_type.py +0 -0
  33. {datamodel_code_generator-0.25.5 → datamodel_code_generator-0.25.7}/datamodel_code_generator/model/pydantic/dataclass.py +0 -0
  34. {datamodel_code_generator-0.25.5 → datamodel_code_generator-0.25.7}/datamodel_code_generator/model/pydantic/imports.py +0 -0
  35. {datamodel_code_generator-0.25.5 → datamodel_code_generator-0.25.7}/datamodel_code_generator/model/pydantic_v2/imports.py +0 -0
  36. {datamodel_code_generator-0.25.5 → datamodel_code_generator-0.25.7}/datamodel_code_generator/model/pydantic_v2/root_model.py +0 -0
  37. {datamodel_code_generator-0.25.5 → datamodel_code_generator-0.25.7}/datamodel_code_generator/model/pydantic_v2/types.py +0 -0
  38. {datamodel_code_generator-0.25.5 → datamodel_code_generator-0.25.7}/datamodel_code_generator/model/rootmodel.py +0 -0
  39. {datamodel_code_generator-0.25.5 → datamodel_code_generator-0.25.7}/datamodel_code_generator/model/scalar.py +0 -0
  40. {datamodel_code_generator-0.25.5 → datamodel_code_generator-0.25.7}/datamodel_code_generator/model/template/Enum.jinja2 +0 -0
  41. {datamodel_code_generator-0.25.5 → datamodel_code_generator-0.25.7}/datamodel_code_generator/model/template/Scalar.jinja2 +0 -0
  42. {datamodel_code_generator-0.25.5 → datamodel_code_generator-0.25.7}/datamodel_code_generator/model/template/TypedDict.jinja2 +0 -0
  43. {datamodel_code_generator-0.25.5 → datamodel_code_generator-0.25.7}/datamodel_code_generator/model/template/TypedDictClass.jinja2 +0 -0
  44. {datamodel_code_generator-0.25.5 → datamodel_code_generator-0.25.7}/datamodel_code_generator/model/template/TypedDictFunction.jinja2 +0 -0
  45. {datamodel_code_generator-0.25.5 → datamodel_code_generator-0.25.7}/datamodel_code_generator/model/template/Union.jinja2 +0 -0
  46. {datamodel_code_generator-0.25.5 → datamodel_code_generator-0.25.7}/datamodel_code_generator/model/template/dataclass.jinja2 +0 -0
  47. {datamodel_code_generator-0.25.5 → datamodel_code_generator-0.25.7}/datamodel_code_generator/model/template/pydantic/BaseModel.jinja2 +0 -0
  48. {datamodel_code_generator-0.25.5 → datamodel_code_generator-0.25.7}/datamodel_code_generator/model/template/pydantic/BaseModel_root.jinja2 +0 -0
  49. {datamodel_code_generator-0.25.5 → datamodel_code_generator-0.25.7}/datamodel_code_generator/model/template/pydantic/Config.jinja2 +0 -0
  50. {datamodel_code_generator-0.25.5 → datamodel_code_generator-0.25.7}/datamodel_code_generator/model/template/pydantic/dataclass.jinja2 +0 -0
  51. {datamodel_code_generator-0.25.5 → datamodel_code_generator-0.25.7}/datamodel_code_generator/model/template/pydantic_v2/BaseModel.jinja2 +0 -0
  52. {datamodel_code_generator-0.25.5 → datamodel_code_generator-0.25.7}/datamodel_code_generator/model/template/pydantic_v2/ConfigDict.jinja2 +0 -0
  53. {datamodel_code_generator-0.25.5 → datamodel_code_generator-0.25.7}/datamodel_code_generator/model/template/root.jinja2 +0 -0
  54. {datamodel_code_generator-0.25.5 → datamodel_code_generator-0.25.7}/datamodel_code_generator/model/typed_dict.py +0 -0
  55. {datamodel_code_generator-0.25.5 → datamodel_code_generator-0.25.7}/datamodel_code_generator/model/union.py +0 -0
  56. {datamodel_code_generator-0.25.5 → datamodel_code_generator-0.25.7}/datamodel_code_generator/parser/__init__.py +0 -0
  57. {datamodel_code_generator-0.25.5 → datamodel_code_generator-0.25.7}/datamodel_code_generator/py.typed +0 -0
  58. {datamodel_code_generator-0.25.5 → datamodel_code_generator-0.25.7}/datamodel_code_generator/reference.py +0 -0
  59. {datamodel_code_generator-0.25.5 → datamodel_code_generator-0.25.7}/datamodel_code_generator/util.py +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: datamodel-code-generator
3
- Version: 0.25.5
3
+ Version: 0.25.7
4
4
  Summary: Datamodel Code Generator
5
5
  Home-page: https://github.com/koxudaxi/datamodel-code-generator
6
6
  License: MIT
@@ -376,6 +376,9 @@ Options:
376
376
  (example: "Authorization: Basic dXNlcjpwYXNz")
377
377
  --http-ignore-tls Disable verification of the remote host's TLS
378
378
  certificate
379
+ --http-query-parameters QUERY_PARAMETER [QUERY_PARAMETER ...]
380
+ Set query parameters in HTTP requests to the remote host.
381
+ (example: "ref=branch")
379
382
  --input INPUT Input file/directory (default: stdin)
380
383
  --input-file-type {auto,openapi,graphql,jsonschema,json,yaml,dict,csv}
381
384
  Input file type (default: auto)
@@ -447,6 +450,9 @@ Field customization:
447
450
  Fields that have default values.
448
451
  --use-field-description
449
452
  Use schema description to populate field docstring
453
+ --use-pendulum
454
+ Use pendulum instead of `datetime` for `date`,
455
+ `datetime`, and `time` data types
450
456
 
451
457
  Model customization:
452
458
  --allow-extra-fields Allow to pass extra fields, if this flag is not
@@ -331,6 +331,9 @@ Options:
331
331
  (example: "Authorization: Basic dXNlcjpwYXNz")
332
332
  --http-ignore-tls Disable verification of the remote host's TLS
333
333
  certificate
334
+ --http-query-parameters QUERY_PARAMETER [QUERY_PARAMETER ...]
335
+ Set query parameters in HTTP requests to the remote host.
336
+ (example: "ref=branch")
334
337
  --input INPUT Input file/directory (default: stdin)
335
338
  --input-file-type {auto,openapi,graphql,jsonschema,json,yaml,dict,csv}
336
339
  Input file type (default: auto)
@@ -402,6 +405,9 @@ Field customization:
402
405
  Fields that have default values.
403
406
  --use-field-description
404
407
  Use schema description to populate field docstring
408
+ --use-pendulum
409
+ Use pendulum instead of `datetime` for `date`,
410
+ `datetime`, and `time` data types
405
411
 
406
412
  Model customization:
407
413
  --allow-extra-fields Allow to pass extra fields, if this flag is not
@@ -299,6 +299,8 @@ def generate(
299
299
  custom_file_header_path: Optional[Path] = None,
300
300
  custom_formatters: Optional[List[str]] = None,
301
301
  custom_formatters_kwargs: Optional[Dict[str, Any]] = None,
302
+ use_pendulum: bool = False,
303
+ http_query_parameters: Optional[Sequence[Tuple[str, str]]] = None,
302
304
  ) -> None:
303
305
  remote_text_cache: DefaultPutDict[str, str] = DefaultPutDict()
304
306
  if isinstance(input_, str):
@@ -308,7 +310,9 @@ def generate(
308
310
 
309
311
  input_text = remote_text_cache.get_or_put(
310
312
  input_.geturl(),
311
- default_factory=lambda url: get_body(url, http_headers, http_ignore_tls),
313
+ default_factory=lambda url: get_body(
314
+ url, http_headers, http_ignore_tls, http_query_parameters
315
+ ),
312
316
  )
313
317
  else:
314
318
  input_text = None
@@ -455,6 +459,8 @@ def generate(
455
459
  known_third_party=data_model_types.known_third_party,
456
460
  custom_formatters=custom_formatters,
457
461
  custom_formatters_kwargs=custom_formatters_kwargs,
462
+ use_pendulum=use_pendulum,
463
+ http_query_parameters=http_query_parameters,
458
464
  **kwargs,
459
465
  )
460
466
 
@@ -197,6 +197,23 @@ class Config(BaseModel):
197
197
  return [validate_each_item(each_item) for each_item in value]
198
198
  return value # pragma: no cover
199
199
 
200
+ @field_validator('http_query_parameters', mode='before')
201
+ def validate_http_query_parameters(
202
+ cls, value: Any
203
+ ) -> Optional[List[Tuple[str, str]]]:
204
+ def validate_each_item(each_item: Any) -> Tuple[str, str]:
205
+ if isinstance(each_item, str): # pragma: no cover
206
+ try:
207
+ field_name, field_value = each_item.split('=', maxsplit=1) # type: str, str
208
+ return field_name, field_value.lstrip()
209
+ except ValueError:
210
+ raise Error(f'Invalid http query parameter: {each_item!r}')
211
+ return each_item # pragma: no cover
212
+
213
+ if isinstance(value, list):
214
+ return [validate_each_item(each_item) for each_item in value]
215
+ return value # pragma: no cover
216
+
200
217
  @model_validator(mode='before')
201
218
  def validate_additional_imports(cls, values: Dict[str, Any]) -> Dict[str, Any]:
202
219
  if values.get('additional_imports') is not None:
@@ -293,6 +310,8 @@ class Config(BaseModel):
293
310
  custom_file_header_path: Optional[Path] = None
294
311
  custom_formatters: Optional[List[str]] = None
295
312
  custom_formatters_kwargs: Optional[TextIOBase] = None
313
+ use_pendulum: bool = False
314
+ http_query_parameters: Optional[Sequence[Tuple[str, str]]] = None
296
315
 
297
316
  def merge_args(self, args: Namespace) -> None:
298
317
  set_args = {
@@ -487,6 +506,8 @@ def main(args: Optional[Sequence[str]] = None) -> Exit:
487
506
  custom_file_header_path=config.custom_file_header_path,
488
507
  custom_formatters=config.custom_formatters,
489
508
  custom_formatters_kwargs=custom_formatters_kwargs,
509
+ use_pendulum=config.use_pendulum,
510
+ http_query_parameters=config.http_query_parameters,
490
511
  )
491
512
  return Exit.OK
492
513
  except InvalidClassNameError as e:
@@ -57,6 +57,12 @@ base_options.add_argument(
57
57
  metavar='HTTP_HEADER',
58
58
  help='Set headers in HTTP requests to the remote host. (example: "Authorization: Basic dXNlcjpwYXNz")',
59
59
  )
60
+ base_options.add_argument(
61
+ '--http-query-parameters',
62
+ nargs='+',
63
+ metavar='HTTP_QUERY_PARAMETERS',
64
+ help='Set query parameters in HTTP requests to the remote host. (example: "ref=branch")',
65
+ )
60
66
  base_options.add_argument(
61
67
  '--http-ignore-tls',
62
68
  help="Disable verification of the remote host's TLS certificate",
@@ -166,6 +172,12 @@ model_options.add_argument(
166
172
  action='store_true',
167
173
  default=None,
168
174
  )
175
+ model_options.add_argument(
176
+ '--use-pendulum',
177
+ help='use pendulum instead of datetime',
178
+ action='store_true',
179
+ default=False,
180
+ )
169
181
 
170
182
  # ======================================================================================
171
183
  # Typing options for generated models
@@ -14,9 +14,14 @@ def get_body(
14
14
  url: str,
15
15
  headers: Optional[Sequence[Tuple[str, str]]] = None,
16
16
  ignore_tls: bool = False,
17
+ query_parameters: Optional[Sequence[Tuple[str, str]]] = None,
17
18
  ) -> str:
18
19
  return httpx.get(
19
- url, headers=headers, verify=not ignore_tls, follow_redirects=True
20
+ url,
21
+ headers=headers,
22
+ verify=not ignore_tls,
23
+ follow_redirects=True,
24
+ params=query_parameters,
20
25
  ).text
21
26
 
22
27
 
@@ -119,3 +119,6 @@ IMPORT_DATETIME = Import.from_full_path('datetime.datetime')
119
119
  IMPORT_PATH = Import.from_full_path('pathlib.Path')
120
120
  IMPORT_TIME = Import.from_full_path('datetime.time')
121
121
  IMPORT_UUID = Import.from_full_path('uuid.UUID')
122
+ IMPORT_PENDULUM_DATE = Import.from_full_path('pendulum.Date')
123
+ IMPORT_PENDULUM_DATETIME = Import.from_full_path('pendulum.DateTime')
124
+ IMPORT_PENDULUM_TIME = Import.from_full_path('pendulum.Time')
@@ -33,7 +33,7 @@ from datamodel_code_generator.types import chain_as_tuple, get_optional_type
33
33
 
34
34
 
35
35
  def _has_field_assignment(field: DataModelFieldBase) -> bool:
36
- return bool(field.field) or not (
36
+ return not (
37
37
  field.required
38
38
  or (field.represented_default == 'None' and field.strip_default_none)
39
39
  )
@@ -48,7 +48,9 @@ def import_extender(cls: Type[DataModelFieldBaseT]) -> Type[DataModelFieldBaseT]
48
48
  @wraps(original_imports.fget) # type: ignore
49
49
  def new_imports(self: DataModelFieldBaseT) -> Tuple[Import, ...]:
50
50
  extra_imports = []
51
- if self.field:
51
+ field = self.field
52
+ # TODO: Improve field detection
53
+ if field and field.startswith('field('):
52
54
  extra_imports.append(IMPORT_MSGSPEC_FIELD)
53
55
  if self.field and 'lambda: convert' in self.field:
54
56
  extra_imports.append(IMPORT_MSGSPEC_CONVERT)
@@ -126,6 +128,7 @@ class DataModelField(DataModelFieldBase):
126
128
  'min_length',
127
129
  'max_length',
128
130
  'pattern',
131
+ 'examples',
129
132
  # 'unique_items', # not supported by msgspec
130
133
  }
131
134
  _PARSE_METHOD = 'convert'
@@ -176,6 +179,8 @@ class DataModelField(DataModelFieldBase):
176
179
 
177
180
  if self.default != UNDEFINED and self.default is not None:
178
181
  data['default'] = self.default
182
+ elif not self.required:
183
+ data['default'] = None
179
184
 
180
185
  if self.required:
181
186
  data = {
@@ -10,6 +10,9 @@ from datamodel_code_generator.imports import (
10
10
  IMPORT_DATETIME,
11
11
  IMPORT_DECIMAL,
12
12
  IMPORT_PATH,
13
+ IMPORT_PENDULUM_DATE,
14
+ IMPORT_PENDULUM_DATETIME,
15
+ IMPORT_PENDULUM_TIME,
13
16
  IMPORT_TIME,
14
17
  IMPORT_UUID,
15
18
  )
@@ -53,11 +56,12 @@ def type_map_factory(
53
56
  data_type: Type[DataType],
54
57
  strict_types: Sequence[StrictTypes],
55
58
  pattern_key: str,
59
+ use_pendulum: bool,
56
60
  ) -> Dict[Types, DataType]:
57
61
  data_type_int = data_type(type='int')
58
62
  data_type_float = data_type(type='float')
59
63
  data_type_str = data_type(type='str')
60
- return {
64
+ result = {
61
65
  Types.integer: data_type_int,
62
66
  Types.int32: data_type_int,
63
67
  Types.int64: data_type_int,
@@ -100,6 +104,12 @@ def type_map_factory(
100
104
  Types.array: data_type.from_import(IMPORT_ANY, is_list=True),
101
105
  Types.any: data_type.from_import(IMPORT_ANY),
102
106
  }
107
+ if use_pendulum:
108
+ result[Types.date] = data_type.from_import(IMPORT_PENDULUM_DATE)
109
+ result[Types.date_time] = data_type.from_import(IMPORT_PENDULUM_DATETIME)
110
+ result[Types.time] = data_type.from_import(IMPORT_PENDULUM_TIME)
111
+
112
+ return result
103
113
 
104
114
 
105
115
  def strict_type_map_factory(data_type: Type[DataType]) -> Dict[StrictTypes, DataType]:
@@ -147,6 +157,7 @@ class DataTypeManager(_DataTypeManager):
147
157
  strict_types: Optional[Sequence[StrictTypes]] = None,
148
158
  use_non_positive_negative_number_constrained_types: bool = False,
149
159
  use_union_operator: bool = False,
160
+ use_pendulum: bool = False,
150
161
  ):
151
162
  super().__init__(
152
163
  python_version,
@@ -155,6 +166,7 @@ class DataTypeManager(_DataTypeManager):
155
166
  strict_types,
156
167
  use_non_positive_negative_number_constrained_types,
157
168
  use_union_operator,
169
+ use_pendulum,
158
170
  )
159
171
 
160
172
  self.type_map: Dict[Types, DataType] = self.type_map_factory(
@@ -185,7 +197,7 @@ class DataTypeManager(_DataTypeManager):
185
197
  strict_types: Sequence[StrictTypes],
186
198
  pattern_key: str,
187
199
  ) -> Dict[Types, DataType]:
188
- return type_map_factory(data_type, strict_types, pattern_key)
200
+ return type_map_factory(data_type, strict_types, pattern_key, self.use_pendulum)
189
201
 
190
202
  def transform_kwargs(
191
203
  self, kwargs: Dict[str, Any], filter_: Set[str]
@@ -18,9 +18,11 @@ class ConfigDict(_BaseModel):
18
18
  title: Optional[str] = None
19
19
  populate_by_name: Optional[bool] = None
20
20
  allow_extra_fields: Optional[bool] = None
21
+ from_attributes: Optional[bool] = None
21
22
  frozen: Optional[bool] = None
22
23
  arbitrary_types_allowed: Optional[bool] = None
23
24
  protected_namespaces: Optional[Tuple[str, ...]] = None
25
+ regex_engine: Optional[str] = None
24
26
 
25
27
 
26
28
  __all__ = [
@@ -1,3 +1,4 @@
1
+ import re
1
2
  from pathlib import Path
2
3
  from typing import (
3
4
  TYPE_CHECKING,
@@ -218,6 +219,16 @@ class BaseModel(BaseModelBase):
218
219
  config_parameters['arbitrary_types_allowed'] = True
219
220
  break
220
221
 
222
+ for field in self.fields:
223
+ # Check if a regex pattern uses lookarounds.
224
+ # Depending on the generation configuration, the pattern may end up in two different places.
225
+ pattern = (
226
+ isinstance(field.constraints, Constraints) and field.constraints.pattern
227
+ ) or (field.data_type.kwargs or {}).get('pattern')
228
+ if pattern and re.search(r'\(\?<?[=!]', pattern):
229
+ config_parameters['regex_engine'] = '"python-re"'
230
+ break
231
+
221
232
  if isinstance(self.extra_template_data.get('config'), dict):
222
233
  for key, value in self.extra_template_data['config'].items():
223
234
  config_parameters[key] = value
@@ -18,12 +18,14 @@ class {{ class_name }}:
18
18
  {%- if not field.annotated and field.field %}
19
19
  {{ field.name }}: {{ field.type_hint }} = {{ field.field }}
20
20
  {%- else %}
21
- {%- if field.annotated %}
21
+ {%- if field.annotated and not field.field %}
22
22
  {{ field.name }}: {{ field.annotated }}
23
+ {%- elif field.annotated and field.field %}
24
+ {{ field.name }}: {{ field.annotated }} = {{ field.field }}
23
25
  {%- else %}
24
26
  {{ field.name }}: {{ field.type_hint }}
25
27
  {%- endif %}
26
- {%- if not field.required or field.data_type.is_optional or field.nullable
28
+ {%- if not field.field and (not field.required or field.data_type.is_optional or field.nullable)
27
29
  %} = {{ field.represented_default }}
28
30
  {%- endif -%}
29
31
  {%- endif %}
@@ -10,7 +10,7 @@
10
10
  {{ decorator }}
11
11
  {% endfor -%}
12
12
 
13
- class {{ class_name }}({{ base_class }}[{{get_type_hint(fields)}}]):{% if comment is defined %} # {{ comment }}{% endif %}
13
+ class {{ class_name }}({{ base_class }}{%- if fields -%}[{{get_type_hint(fields)}}]{%- endif -%}):{% if comment is defined %} # {{ comment }}{% endif %}
14
14
  {%- if description %}
15
15
  """
16
16
  {{ description | indent(4) }}
@@ -58,6 +58,7 @@ class DataTypeManager(_DataTypeManager):
58
58
  strict_types: Optional[Sequence[StrictTypes]] = None,
59
59
  use_non_positive_negative_number_constrained_types: bool = False,
60
60
  use_union_operator: bool = False,
61
+ use_pendulum: bool = False,
61
62
  ):
62
63
  super().__init__(
63
64
  python_version,
@@ -66,6 +67,7 @@ class DataTypeManager(_DataTypeManager):
66
67
  strict_types,
67
68
  use_non_positive_negative_number_constrained_types,
68
69
  use_union_operator,
70
+ use_pendulum,
69
71
  )
70
72
 
71
73
  self.type_map: Dict[Types, DataType] = type_map_factory(
@@ -390,6 +390,8 @@ class Parser(ABC):
390
390
  known_third_party: Optional[List[str]] = None,
391
391
  custom_formatters: Optional[List[str]] = None,
392
392
  custom_formatters_kwargs: Optional[Dict[str, Any]] = None,
393
+ use_pendulum: bool = False,
394
+ http_query_parameters: Optional[Sequence[Tuple[str, str]]] = None,
393
395
  ) -> None:
394
396
  self.data_type_manager: DataTypeManager = data_type_manager_type(
395
397
  python_version=target_python_version,
@@ -397,6 +399,7 @@ class Parser(ABC):
397
399
  use_generic_container_types=use_generic_container_types,
398
400
  strict_types=strict_types,
399
401
  use_union_operator=use_union_operator,
402
+ use_pendulum=use_pendulum,
400
403
  )
401
404
  self.data_model_type: Type[DataModel] = data_model_type
402
405
  self.data_model_root_type: Type[DataModel] = data_model_root_type
@@ -490,6 +493,9 @@ class Parser(ABC):
490
493
  self.class_name: Optional[str] = class_name
491
494
  self.wrap_string_literal: Optional[bool] = wrap_string_literal
492
495
  self.http_headers: Optional[Sequence[Tuple[str, str]]] = http_headers
496
+ self.http_query_parameters: Optional[Sequence[Tuple[str, str]]] = (
497
+ http_query_parameters
498
+ )
493
499
  self.http_ignore_tls: bool = http_ignore_tls
494
500
  self.use_annotated: bool = use_annotated
495
501
  if self.use_annotated and not self.field_constraints: # pragma: no cover
@@ -547,7 +553,7 @@ class Parser(ABC):
547
553
  return self.remote_text_cache.get_or_put(
548
554
  url,
549
555
  default_factory=lambda url_: get_body(
550
- url, self.http_headers, self.http_ignore_tls
556
+ url, self.http_headers, self.http_ignore_tls, self.http_query_parameters
551
557
  ),
552
558
  )
553
559
 
@@ -671,7 +677,8 @@ class Parser(ABC):
671
677
  for model in models:
672
678
  scoped_model_resolver.add(model.path, model.class_name)
673
679
  for model in models:
674
- imports.append(model.imports)
680
+ before_import = model.imports
681
+ imports.append(before_import)
675
682
  for data_type in model.all_data_types:
676
683
  # To change from/import
677
684
 
@@ -681,13 +688,19 @@ class Parser(ABC):
681
688
  continue
682
689
 
683
690
  if isinstance(data_type, BaseClassDataType):
684
- from_ = ''.join(relative(model.module_name, data_type.full_name))
691
+ left, right = relative(model.module_name, data_type.full_name)
692
+ from_ = (
693
+ ''.join([left, right])
694
+ if left.endswith('.')
695
+ else '.'.join([left, right])
696
+ )
685
697
  import_ = data_type.reference.short_name
686
698
  full_path = from_, import_
687
699
  else:
688
700
  from_, import_ = full_path = relative(
689
701
  model.module_name, data_type.full_name
690
702
  )
703
+ import_ = import_.replace('-', '_')
691
704
 
692
705
  alias = scoped_model_resolver.add(full_path, import_).name
693
706
 
@@ -695,7 +708,7 @@ class Parser(ABC):
695
708
  if from_ and import_ and alias != name:
696
709
  data_type.alias = (
697
710
  alias
698
- if from_ == '.' and data_type.full_name == import_
711
+ if data_type.reference.short_name == import_
699
712
  else f'{alias}.{name}'
700
713
  )
701
714
 
@@ -709,6 +722,9 @@ class Parser(ABC):
709
722
  reference_path=data_type.reference.path,
710
723
  ),
711
724
  )
725
+ after_import = model.imports
726
+ if before_import != after_import:
727
+ imports.append(after_import)
712
728
 
713
729
  @classmethod
714
730
  def __extract_inherited_enum(cls, models: List[DataModel]) -> None:
@@ -763,8 +779,18 @@ class Parser(ABC):
763
779
  discriminator_model.path.split('#/')[-1]
764
780
  != path.split('#/')[-1]
765
781
  ):
766
- # TODO: support external reference
767
- continue
782
+ if (
783
+ path.startswith('#/')
784
+ or discriminator_model.path[:-1]
785
+ != path.split('/')[-1]
786
+ ):
787
+ t_path = path[str(path).find('/') + 1 :]
788
+ t_disc = discriminator_model.path[
789
+ : str(discriminator_model.path).find('#')
790
+ ].lstrip('../')
791
+ t_disc_2 = '/'.join(t_disc.split('/')[1:])
792
+ if t_path != t_disc and t_path != t_disc_2:
793
+ continue
768
794
  type_names.append(name)
769
795
  else:
770
796
  type_names = [discriminator_model.path.split('/')[-1]]
@@ -891,6 +917,7 @@ class Parser(ABC):
891
917
  name=model.name,
892
918
  path=model.reference.path + '/reuse',
893
919
  ),
920
+ custom_template_dir=model._custom_template_dir,
894
921
  )
895
922
  if cached_model_reference.path in require_update_action_models:
896
923
  require_update_action_models.append(inherited_model.path)
@@ -1234,6 +1261,7 @@ class Parser(ABC):
1234
1261
  init = True
1235
1262
  else:
1236
1263
  module = (*module[:-1], f'{module[-1]}.py')
1264
+ module = tuple(part.replace('-', '_') for part in module)
1237
1265
  else:
1238
1266
  module = ('__init__.py',)
1239
1267
 
@@ -156,6 +156,8 @@ class GraphQLParser(Parser):
156
156
  known_third_party: Optional[List[str]] = None,
157
157
  custom_formatters: Optional[List[str]] = None,
158
158
  custom_formatters_kwargs: Optional[Dict[str, Any]] = None,
159
+ use_pendulum: bool = False,
160
+ http_query_parameters: Optional[Sequence[Tuple[str, str]]] = None,
159
161
  ) -> None:
160
162
  super().__init__(
161
163
  source=source,
@@ -221,6 +223,8 @@ class GraphQLParser(Parser):
221
223
  known_third_party=known_third_party,
222
224
  custom_formatters=custom_formatters,
223
225
  custom_formatters_kwargs=custom_formatters_kwargs,
226
+ use_pendulum=use_pendulum,
227
+ http_query_parameters=http_query_parameters,
224
228
  )
225
229
 
226
230
  self.data_model_scalar_type = data_model_scalar_type
@@ -438,6 +438,8 @@ class JsonSchemaParser(Parser):
438
438
  known_third_party: Optional[List[str]] = None,
439
439
  custom_formatters: Optional[List[str]] = None,
440
440
  custom_formatters_kwargs: Optional[Dict[str, Any]] = None,
441
+ use_pendulum: bool = False,
442
+ http_query_parameters: Optional[Sequence[Tuple[str, str]]] = None,
441
443
  ) -> None:
442
444
  super().__init__(
443
445
  source=source,
@@ -503,6 +505,8 @@ class JsonSchemaParser(Parser):
503
505
  known_third_party=known_third_party,
504
506
  custom_formatters=custom_formatters,
505
507
  custom_formatters_kwargs=custom_formatters_kwargs,
508
+ use_pendulum=use_pendulum,
509
+ http_query_parameters=http_query_parameters,
506
510
  )
507
511
 
508
512
  self.remote_object_cache: DefaultPutDict[str, Dict[str, Any]] = DefaultPutDict()
@@ -1307,6 +1311,10 @@ class JsonSchemaParser(Parser):
1307
1311
  data_type = self.data_type_manager.get_data_type_from_full_path(
1308
1312
  obj.custom_type_path, is_custom_type=True
1309
1313
  )
1314
+ elif obj.is_array:
1315
+ data_type = self.parse_array_fields(
1316
+ name, obj, get_special_path('array', path)
1317
+ ).data_type
1310
1318
  elif obj.anyOf or obj.oneOf:
1311
1319
  reference = self.model_resolver.add(
1312
1320
  path, name, loaded=True, class_name=True
@@ -220,6 +220,8 @@ class OpenAPIParser(JsonSchemaParser):
220
220
  known_third_party: Optional[List[str]] = None,
221
221
  custom_formatters: Optional[List[str]] = None,
222
222
  custom_formatters_kwargs: Optional[Dict[str, Any]] = None,
223
+ use_pendulum: bool = False,
224
+ http_query_parameters: Optional[Sequence[Tuple[str, str]]] = None,
223
225
  ):
224
226
  super().__init__(
225
227
  source=source,
@@ -285,6 +287,8 @@ class OpenAPIParser(JsonSchemaParser):
285
287
  known_third_party=known_third_party,
286
288
  custom_formatters=custom_formatters,
287
289
  custom_formatters_kwargs=custom_formatters_kwargs,
290
+ use_pendulum=use_pendulum,
291
+ http_query_parameters=http_query_parameters,
288
292
  )
289
293
  self.open_api_scopes: List[OpenAPIScope] = openapi_scopes or [
290
294
  OpenAPIScope.Schemas
@@ -578,6 +578,7 @@ class DataTypeManager(ABC):
578
578
  strict_types: Optional[Sequence[StrictTypes]] = None,
579
579
  use_non_positive_negative_number_constrained_types: bool = False,
580
580
  use_union_operator: bool = False,
581
+ use_pendulum: bool = False,
581
582
  ) -> None:
582
583
  self.python_version = python_version
583
584
  self.use_standard_collections: bool = use_standard_collections
@@ -587,6 +588,7 @@ class DataTypeManager(ABC):
587
588
  use_non_positive_negative_number_constrained_types
588
589
  )
589
590
  self.use_union_operator: bool = use_union_operator
591
+ self.use_pendulum: bool = use_pendulum
590
592
 
591
593
  if (
592
594
  use_generic_container_types and python_version == PythonVersion.PY_36
@@ -0,0 +1 @@
1
+ version: str = '0.25.7'
@@ -1,6 +1,6 @@
1
1
  [tool.poetry]
2
2
  name = "datamodel-code-generator"
3
- version = "0.25.5"
3
+ version = "0.25.7"
4
4
  description = "Datamodel Code Generator"
5
5
  authors = ["Koudai Aono <koxudaxi@gmail.com>"]
6
6
  readme = "README.md"
@@ -78,7 +78,7 @@ types-setuptools = ">=67.6.0.5,<70.0.0.0"
78
78
  pydantic = "*"
79
79
  httpx = ">=0.24.1"
80
80
  PySnooper = "*"
81
- ruff = ">=0.0.290,<0.3.3"
81
+ ruff = ">=0.0.290,<0.4.8"
82
82
  ruff-lsp = ">=0.0.39,<0.0.41"
83
83
  pre-commit = "*"
84
84
  pytest-xdist = "^3.3.1"
@@ -1 +0,0 @@
1
- version: str = '0.25.5'