datamodel-code-generator 0.27.2__py3-none-any.whl → 0.28.0__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package as they appear in their public registry, and is provided for informational purposes only.

Potentially problematic release.

Files changed (43)
  1. datamodel_code_generator/__init__.py +168 -196
  2. datamodel_code_generator/__main__.py +146 -189
  3. datamodel_code_generator/arguments.py +227 -230
  4. datamodel_code_generator/format.py +77 -129
  5. datamodel_code_generator/http.py +12 -10
  6. datamodel_code_generator/imports.py +59 -65
  7. datamodel_code_generator/model/__init__.py +28 -31
  8. datamodel_code_generator/model/base.py +100 -144
  9. datamodel_code_generator/model/dataclass.py +62 -70
  10. datamodel_code_generator/model/enum.py +34 -30
  11. datamodel_code_generator/model/imports.py +13 -11
  12. datamodel_code_generator/model/msgspec.py +116 -138
  13. datamodel_code_generator/model/pydantic/__init__.py +18 -28
  14. datamodel_code_generator/model/pydantic/base_model.py +121 -140
  15. datamodel_code_generator/model/pydantic/custom_root_type.py +2 -2
  16. datamodel_code_generator/model/pydantic/dataclass.py +6 -4
  17. datamodel_code_generator/model/pydantic/imports.py +35 -33
  18. datamodel_code_generator/model/pydantic/types.py +91 -119
  19. datamodel_code_generator/model/pydantic_v2/__init__.py +21 -18
  20. datamodel_code_generator/model/pydantic_v2/base_model.py +118 -127
  21. datamodel_code_generator/model/pydantic_v2/imports.py +5 -3
  22. datamodel_code_generator/model/pydantic_v2/root_model.py +6 -6
  23. datamodel_code_generator/model/pydantic_v2/types.py +11 -7
  24. datamodel_code_generator/model/rootmodel.py +1 -1
  25. datamodel_code_generator/model/scalar.py +33 -32
  26. datamodel_code_generator/model/typed_dict.py +41 -51
  27. datamodel_code_generator/model/types.py +24 -19
  28. datamodel_code_generator/model/union.py +21 -17
  29. datamodel_code_generator/parser/__init__.py +16 -12
  30. datamodel_code_generator/parser/base.py +327 -515
  31. datamodel_code_generator/parser/graphql.py +87 -119
  32. datamodel_code_generator/parser/jsonschema.py +438 -607
  33. datamodel_code_generator/parser/openapi.py +180 -220
  34. datamodel_code_generator/pydantic_patch.py +8 -9
  35. datamodel_code_generator/reference.py +199 -297
  36. datamodel_code_generator/types.py +149 -215
  37. datamodel_code_generator/util.py +23 -36
  38. {datamodel_code_generator-0.27.2.dist-info → datamodel_code_generator-0.28.0.dist-info}/METADATA +10 -5
  39. datamodel_code_generator-0.28.0.dist-info/RECORD +59 -0
  40. datamodel_code_generator-0.27.2.dist-info/RECORD +0 -59
  41. {datamodel_code_generator-0.27.2.dist-info → datamodel_code_generator-0.28.0.dist-info}/WHEEL +0 -0
  42. {datamodel_code_generator-0.27.2.dist-info → datamodel_code_generator-0.28.0.dist-info}/entry_points.txt +0 -0
  43. {datamodel_code_generator-0.27.2.dist-info → datamodel_code_generator-0.28.0.dist-info}/licenses/LICENSE +0 -0
--- a/datamodel_code_generator/parser/graphql.py
+++ b/datamodel_code_generator/parser/graphql.py
@@ -2,20 +2,9 @@ from __future__ import annotations

 from pathlib import Path
 from typing import (
+    TYPE_CHECKING,
     Any,
     Callable,
-    DefaultDict,
-    Dict,
-    Iterable,
-    Iterator,
-    List,
-    Mapping,
-    Optional,
-    Sequence,
-    Set,
-    Tuple,
-    Type,
-    Union,
 )
 from urllib.parse import ParseResult

@@ -23,6 +12,7 @@ from datamodel_code_generator import (
     DefaultPutDict,
     LiteralType,
     PythonVersion,
+    PythonVersionMin,
     snooper_to_methods,
 )
 from datamodel_code_generator.model import DataModel, DataModelFieldBase
@@ -41,13 +31,17 @@ from datamodel_code_generator.types import DataTypeManager, StrictTypes, Types

 try:
     import graphql
-except ImportError:  # pragma: no cover
-    raise Exception(
-        "Please run `$pip install 'datamodel-code-generator[graphql]`' to generate data-model from a GraphQL schema."
-    )
+except ImportError as exc:  # pragma: no cover
+    msg = "Please run `$pip install 'datamodel-code-generator[graphql]`' to generate data-model from a GraphQL schema."
+    raise Exception(msg) from exc  # noqa: TRY002
+

 from datamodel_code_generator.format import DatetimeClassType

+if TYPE_CHECKING:
+    from collections import defaultdict
+    from collections.abc import Iterable, Iterator, Mapping, Sequence
+
 graphql_resolver = graphql.type.introspection.TypeResolvers()


@@ -57,26 +51,24 @@ def build_graphql_schema(schema_str: str) -> graphql.GraphQLSchema:
     return graphql.lexicographic_sort_schema(schema)


-@snooper_to_methods(max_variable_length=None)
+@snooper_to_methods()
 class GraphQLParser(Parser):
     # raw graphql schema as `graphql-core` object
     raw_obj: graphql.GraphQLSchema
     # all processed graphql objects
     # mapper from an object name (unique) to an object
-    all_graphql_objects: Dict[str, graphql.GraphQLNamedType]
+    all_graphql_objects: dict[str, graphql.GraphQLNamedType]
     # a reference for each object
     # mapper from an object name to his reference
-    references: Dict[str, Reference] = {}
+    references: dict[str, Reference] = {}  # noqa: RUF012
     # mapper from graphql type to all objects with this type
     # `graphql.type.introspection.TypeKind` -- an enum with all supported types
     # `graphql.GraphQLNamedType` -- base type for each graphql object
     # see `graphql-core` for more details
-    support_graphql_types: Dict[
-        graphql.type.introspection.TypeKind, List[graphql.GraphQLNamedType]
-    ]
+    support_graphql_types: dict[graphql.type.introspection.TypeKind, list[graphql.GraphQLNamedType]]
     # graphql types order for render
     # may be as a parameter in the future
-    parse_order: List[graphql.type.introspection.TypeKind] = [
+    parse_order: list[graphql.type.introspection.TypeKind] = [  # noqa: RUF012
         graphql.type.introspection.TypeKind.SCALAR,
         graphql.type.introspection.TypeKind.ENUM,
         graphql.type.introspection.TypeKind.INTERFACE,
@@ -85,79 +77,79 @@ class GraphQLParser(Parser):
         graphql.type.introspection.TypeKind.UNION,
     ]

-    def __init__(
+    def __init__(  # noqa: PLR0913
         self,
-        source: Union[str, Path, ParseResult],
+        source: str | Path | ParseResult,
         *,
-        data_model_type: Type[DataModel] = pydantic_model.BaseModel,
-        data_model_root_type: Type[DataModel] = pydantic_model.CustomRootType,
-        data_model_scalar_type: Type[DataModel] = DataTypeScalar,
-        data_model_union_type: Type[DataModel] = DataTypeUnion,
-        data_type_manager_type: Type[DataTypeManager] = pydantic_model.DataTypeManager,
-        data_model_field_type: Type[DataModelFieldBase] = pydantic_model.DataModelField,
-        base_class: Optional[str] = None,
-        additional_imports: Optional[List[str]] = None,
-        custom_template_dir: Optional[Path] = None,
-        extra_template_data: Optional[DefaultDict[str, Dict[str, Any]]] = None,
-        target_python_version: PythonVersion = PythonVersion.PY_38,
-        dump_resolve_reference_action: Optional[Callable[[Iterable[str]], str]] = None,
+        data_model_type: type[DataModel] = pydantic_model.BaseModel,
+        data_model_root_type: type[DataModel] = pydantic_model.CustomRootType,
+        data_model_scalar_type: type[DataModel] = DataTypeScalar,
+        data_model_union_type: type[DataModel] = DataTypeUnion,
+        data_type_manager_type: type[DataTypeManager] = pydantic_model.DataTypeManager,
+        data_model_field_type: type[DataModelFieldBase] = pydantic_model.DataModelField,
+        base_class: str | None = None,
+        additional_imports: list[str] | None = None,
+        custom_template_dir: Path | None = None,
+        extra_template_data: defaultdict[str, dict[str, Any]] | None = None,
+        target_python_version: PythonVersion = PythonVersionMin,
+        dump_resolve_reference_action: Callable[[Iterable[str]], str] | None = None,
         validation: bool = False,
         field_constraints: bool = False,
         snake_case_field: bool = False,
         strip_default_none: bool = False,
-        aliases: Optional[Mapping[str, str]] = None,
+        aliases: Mapping[str, str] | None = None,
         allow_population_by_field_name: bool = False,
         apply_default_values_for_required_fields: bool = False,
         allow_extra_fields: bool = False,
         force_optional_for_required_fields: bool = False,
-        class_name: Optional[str] = None,
+        class_name: str | None = None,
         use_standard_collections: bool = False,
-        base_path: Optional[Path] = None,
+        base_path: Path | None = None,
         use_schema_description: bool = False,
         use_field_description: bool = False,
         use_default_kwarg: bool = False,
         reuse_model: bool = False,
-        encoding: str = 'utf-8',
-        enum_field_as_literal: Optional[LiteralType] = None,
+        encoding: str = "utf-8",
+        enum_field_as_literal: LiteralType | None = None,
         set_default_enum_member: bool = False,
         use_subclass_enum: bool = False,
         strict_nullable: bool = False,
         use_generic_container_types: bool = False,
         enable_faux_immutability: bool = False,
-        remote_text_cache: Optional[DefaultPutDict[str, str]] = None,
+        remote_text_cache: DefaultPutDict[str, str] | None = None,
         disable_appending_item_suffix: bool = False,
-        strict_types: Optional[Sequence[StrictTypes]] = None,
-        empty_enum_field_name: Optional[str] = None,
-        custom_class_name_generator: Optional[Callable[[str], str]] = None,
-        field_extra_keys: Optional[Set[str]] = None,
+        strict_types: Sequence[StrictTypes] | None = None,
+        empty_enum_field_name: str | None = None,
+        custom_class_name_generator: Callable[[str], str] | None = None,
+        field_extra_keys: set[str] | None = None,
         field_include_all_keys: bool = False,
-        field_extra_keys_without_x_prefix: Optional[Set[str]] = None,
-        wrap_string_literal: Optional[bool] = None,
+        field_extra_keys_without_x_prefix: set[str] | None = None,
+        wrap_string_literal: bool | None = None,
         use_title_as_name: bool = False,
         use_operation_id_as_name: bool = False,
         use_unique_items_as_set: bool = False,
-        http_headers: Optional[Sequence[Tuple[str, str]]] = None,
+        http_headers: Sequence[tuple[str, str]] | None = None,
         http_ignore_tls: bool = False,
         use_annotated: bool = False,
         use_non_positive_negative_number_constrained_types: bool = False,
-        original_field_name_delimiter: Optional[str] = None,
+        original_field_name_delimiter: str | None = None,
         use_double_quotes: bool = False,
         use_union_operator: bool = False,
         allow_responses_without_content: bool = False,
         collapse_root_models: bool = False,
-        special_field_name_prefix: Optional[str] = None,
+        special_field_name_prefix: str | None = None,
         remove_special_field_name_prefix: bool = False,
         capitalise_enum_members: bool = False,
         keep_model_order: bool = False,
         use_one_literal_as_default: bool = False,
-        known_third_party: Optional[List[str]] = None,
-        custom_formatters: Optional[List[str]] = None,
-        custom_formatters_kwargs: Optional[Dict[str, Any]] = None,
+        known_third_party: list[str] | None = None,
+        custom_formatters: list[str] | None = None,
+        custom_formatters_kwargs: dict[str, Any] | None = None,
         use_pendulum: bool = False,
-        http_query_parameters: Optional[Sequence[Tuple[str, str]]] = None,
+        http_query_parameters: Sequence[tuple[str, str]] | None = None,
         treat_dots_as_module: bool = False,
         use_exact_imports: bool = False,
-        default_field_extras: Optional[Dict[str, Any]] = None,
+        default_field_extras: dict[str, Any] | None = None,
         target_datetime_class: DatetimeClassType = DatetimeClassType.Datetime,
         keyword_only: bool = False,
         no_alias: bool = False,
@@ -241,7 +233,7 @@ class GraphQLParser(Parser):
         self.use_standard_collections = use_standard_collections
         self.use_union_operator = use_union_operator

-    def _get_context_source_path_parts(self) -> Iterator[Tuple[Source, List[str]]]:
+    def _get_context_source_path_parts(self) -> Iterator[tuple[Source, list[str]]]:
         # TODO (denisart): Temporarily this method duplicates
         # the method `datamodel_code_generator.parser.jsonschema.JsonSchemaParser._get_context_source_path_parts`.

@@ -250,8 +242,7 @@ class GraphQLParser(Parser):
         ):  # pragma: no cover
             self.current_source_path = Path()
             self.model_resolver.after_load_files = {
-                self.base_path.joinpath(s.path).resolve().as_posix()
-                for s in self.iter_source
+                self.base_path.joinpath(s.path).resolve().as_posix() for s in self.iter_source
             }

         for source in self.iter_source:
@@ -261,17 +252,18 @@ class GraphQLParser(Parser):
                 path_parts = list(source.path.parts)
             if self.current_source_path is not None:  # pragma: no cover
                 self.current_source_path = source.path
-            with self.model_resolver.current_base_path_context(
-                source.path.parent
-            ), self.model_resolver.current_root_context(path_parts):
+            with (
+                self.model_resolver.current_base_path_context(source.path.parent),
+                self.model_resolver.current_root_context(path_parts),
+            ):
                 yield source, path_parts

-    def _resolve_types(self, paths: List[str], schema: graphql.GraphQLSchema) -> None:
+    def _resolve_types(self, paths: list[str], schema: graphql.GraphQLSchema) -> None:
         for type_name, type_ in schema.type_map.items():
-            if type_name.startswith('__'):
+            if type_name.startswith("__"):
                 continue

-            if type_name in ['Query', 'Mutation']:
+            if type_name in {"Query", "Mutation"}:
                 continue

             resolved_type = graphql_resolver.kind(type_, None)
@@ -280,7 +272,7 @@ class GraphQLParser(Parser):
                 self.all_graphql_objects[type_.name] = type_
                 # TODO: need a special method for each graph type
                 self.references[type_.name] = Reference(
-                    path=f'{str(*paths)}/{resolved_type.value}/{type_.name}',
+                    path=f"{paths!s}/{resolved_type.value}/{type_.name}",
                     name=type_.name,
                     original_name=type_.name,
                 )
@@ -289,7 +281,7 @@ class GraphQLParser(Parser):

     def _typename_field(self, name: str) -> DataModelFieldBase:
         return self.data_model_field_type(
-            name='typename__',
+            name="typename__",
             data_type=DataType(
                 literals=[name],
                 use_union_operator=self.use_union_operator,
@@ -298,24 +290,23 @@ class GraphQLParser(Parser):
             default=name,
             use_annotated=self.use_annotated,
             required=False,
-            alias='__typename',
+            alias="__typename",
             use_one_literal_as_default=True,
             has_default=True,
         )

-    def _get_default(
+    def _get_default(  # noqa: PLR6301
         self,
-        field: Union[graphql.GraphQLField, graphql.GraphQLInputField],
+        field: graphql.GraphQLField | graphql.GraphQLInputField,
         final_data_type: DataType,
-        required: bool,
+        required: bool,  # noqa: FBT001
     ) -> Any:
         if isinstance(field, graphql.GraphQLInputField):  # pragma: no cover
             if field.default_value == graphql.pyutils.Undefined:  # pragma: no cover
                 return None
             return field.default_value
-        if required is False:
-            if final_data_type.is_list:
-                return None
+        if required is False and final_data_type.is_list:
+            return None

         return None

@@ -331,15 +322,11 @@ class GraphQLParser(Parser):
         )

     def parse_enum(self, enum_object: graphql.GraphQLEnumType) -> None:
-        enum_fields: List[DataModelFieldBase] = []
-        exclude_field_names: Set[str] = set()
+        enum_fields: list[DataModelFieldBase] = []
+        exclude_field_names: set[str] = set()

         for value_name, value in enum_object.values.items():
-            default = (
-                f"'{value_name.translate(escape_characters)}'"
-                if isinstance(value_name, str)
-                else value_name
-            )
+            default = f"'{value_name.translate(escape_characters)}'" if isinstance(value_name, str) else value_name

             field_name = self.model_resolver.get_valid_field_name(
                 value_name, excludes=exclude_field_names, model_type=ModelType.ENUM
@@ -373,8 +360,8 @@ class GraphQLParser(Parser):
     def parse_field(
         self,
         field_name: str,
-        alias: Optional[str],
-        field: Union[graphql.GraphQLField, graphql.GraphQLInputField],
+        alias: str | None,
+        field: graphql.GraphQLField | graphql.GraphQLInputField,
     ) -> DataModelFieldBase:
         final_data_type = DataType(
             is_optional=True,
@@ -399,23 +386,17 @@ class GraphQLParser(Parser):
             elif graphql.is_non_null_type(obj):  # pragma: no cover
                 data_type.is_optional = False

-            obj = obj.of_type  # pyright: ignore [reportAttributeAccessIssue]
+            obj = obj.of_type  # pyright: ignore[reportAttributeAccessIssue]

-        data_type.type = obj.name  # pyright: ignore [reportAttributeAccessIssue]
+        data_type.type = obj.name  # pyright: ignore[reportAttributeAccessIssue]

-        required = (not self.force_optional_for_required_fields) and (
-            not final_data_type.is_optional
-        )
+        required = (not self.force_optional_for_required_fields) and (not final_data_type.is_optional)

         default = self._get_default(field, final_data_type, required)
-        extras = (
-            {}
-            if self.default_field_extras is None
-            else self.default_field_extras.copy()
-        )
+        extras = {} if self.default_field_extras is None else self.default_field_extras.copy()

         if field.description is not None:  # pragma: no cover
-            extras['description'] = field.description
+            extras["description"] = field.description

         return self.data_model_field_type(
             name=field_name,
@@ -434,14 +415,10 @@ class GraphQLParser(Parser):

     def parse_object_like(
         self,
-        obj: Union[
-            graphql.GraphQLInterfaceType,
-            graphql.GraphQLObjectType,
-            graphql.GraphQLInputObjectType,
-        ],
+        obj: graphql.GraphQLInterfaceType | graphql.GraphQLObjectType | graphql.GraphQLInputObjectType,
     ) -> None:
         fields = []
-        exclude_field_names: Set[str] = set()
+        exclude_field_names: set[str] = set()

         for field_name, field in obj.fields.items():
             field_name_, alias = self.model_resolver.get_valid_field_name_and_alias(
@@ -455,8 +432,8 @@ class GraphQLParser(Parser):
             fields.append(self._typename_field(obj.name))

         base_classes = []
-        if hasattr(obj, 'interfaces'):  # pragma: no cover
-            base_classes = [self.references[i.name] for i in obj.interfaces]  # pyright: ignore [reportAttributeAccessIssue]
+        if hasattr(obj, "interfaces"):  # pragma: no cover
+            base_classes = [self.references[i.name] for i in obj.interfaces]  # pyright: ignore[reportAttributeAccessIssue]

         data_model_type = self.data_model_type(
             reference=self.references[obj.name],
@@ -471,26 +448,17 @@ class GraphQLParser(Parser):
         )
         self.results.append(data_model_type)

-    def parse_interface(
-        self, interface_graphql_object: graphql.GraphQLInterfaceType
-    ) -> None:
+    def parse_interface(self, interface_graphql_object: graphql.GraphQLInterfaceType) -> None:
         self.parse_object_like(interface_graphql_object)

     def parse_object(self, graphql_object: graphql.GraphQLObjectType) -> None:
         self.parse_object_like(graphql_object)

-    def parse_input_object(
-        self, input_graphql_object: graphql.GraphQLInputObjectType
-    ) -> None:
+    def parse_input_object(self, input_graphql_object: graphql.GraphQLInputObjectType) -> None:
         self.parse_object_like(input_graphql_object)  # pragma: no cover

     def parse_union(self, union_object: graphql.GraphQLUnionType) -> None:
-        fields = []
-
-        for type_ in union_object.types:
-            fields.append(
-                self.data_model_field_type(name=type_.name, data_type=DataType())
-            )
+        fields = [self.data_model_field_type(name=type_.name, data_type=DataType()) for type_ in union_object.types]

         data_model_type = self.data_model_union_type(
             reference=self.references[union_object.name],
@@ -505,7 +473,7 @@ class GraphQLParser(Parser):

     def parse_raw(self) -> None:
         self.all_graphql_objects = {}
-        self.references: Dict[str, Reference] = {}
+        self.references: dict[str, Reference] = {}

         self.support_graphql_types = {
             graphql.type.introspection.TypeKind.SCALAR: [],
@@ -517,7 +485,7 @@ class GraphQLParser(Parser):
         }

         # may be as a parameter in the future (??)
-        _mapper_from_graphql_type_to_parser_method = {
+        mapper_from_graphql_type_to_parser_method = {
             graphql.type.introspection.TypeKind.SCALAR: self.parse_scalar,
             graphql.type.introspection.TypeKind.ENUM: self.parse_enum,
             graphql.type.introspection.TypeKind.INTERFACE: self.parse_interface,
@@ -534,5 +502,5 @@ class GraphQLParser(Parser):

         for next_type in self.parse_order:
             for obj in self.support_graphql_types[next_type]:
-                parser_ = _mapper_from_graphql_type_to_parser_method[next_type]
-                parser_(obj)  # type: ignore
+                parser_ = mapper_from_graphql_type_to_parser_method[next_type]
+                parser_(obj)
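
Most of the hunks above follow a single modernisation pattern: because the module already starts with `from __future__ import annotations`, version 0.28.0 can replace `typing.Dict`, `List`, `Optional`, `Union`, and the other capitalised typing aliases with built-in generics and PEP 604 unions, move typing-only imports under `TYPE_CHECKING`, and standardise on double quotes. A minimal sketch of that style follows; the names (`merge_headers`, `base`, `extra`) are hypothetical and not taken from the package.

    from __future__ import annotations

    from typing import TYPE_CHECKING, Any

    if TYPE_CHECKING:
        # Imported only for static type checking; nothing here is needed at runtime.
        from collections.abc import Mapping, Sequence


    def merge_headers(
        base: Mapping[str, str] | None = None,  # previously: Optional[Mapping[str, str]]
        extra: Sequence[tuple[str, str]] | None = None,  # previously: Optional[Sequence[Tuple[str, str]]]
    ) -> dict[str, Any]:  # previously: Dict[str, Any]
        """Merge optional header sources into a plain dict (illustration only)."""
        headers: dict[str, Any] = dict(base or {})
        headers.update(extra or ())  # dict.update accepts an iterable of (key, value) pairs
        return headers

Because `from __future__ import annotations` turns annotations into strings, the new syntax still parses on the older interpreters the generator targets; only code that evaluates annotations at runtime would need the real types. The constructor default likewise moves from the hard-coded `PythonVersion.PY_38` to `PythonVersionMin`, which appears to track the lowest Python version the package still supports.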