pyopenapi-gen 0.14.0__py3-none-any.whl → 0.14.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (80)
  1. pyopenapi_gen/cli.py +3 -3
  2. pyopenapi_gen/context/import_collector.py +10 -10
  3. pyopenapi_gen/context/render_context.py +13 -13
  4. pyopenapi_gen/core/auth/plugins.py +7 -7
  5. pyopenapi_gen/core/http_status_codes.py +2 -4
  6. pyopenapi_gen/core/http_transport.py +19 -19
  7. pyopenapi_gen/core/loader/operations/parser.py +2 -2
  8. pyopenapi_gen/core/loader/operations/request_body.py +3 -3
  9. pyopenapi_gen/core/loader/parameters/parser.py +3 -3
  10. pyopenapi_gen/core/loader/responses/parser.py +2 -2
  11. pyopenapi_gen/core/loader/schemas/extractor.py +4 -4
  12. pyopenapi_gen/core/pagination.py +3 -3
  13. pyopenapi_gen/core/parsing/common/ref_resolution/helpers/list_response.py +3 -3
  14. pyopenapi_gen/core/parsing/common/ref_resolution/helpers/missing_ref.py +2 -2
  15. pyopenapi_gen/core/parsing/common/ref_resolution/helpers/new_schema.py +3 -3
  16. pyopenapi_gen/core/parsing/common/ref_resolution/helpers/stripped_suffix.py +3 -3
  17. pyopenapi_gen/core/parsing/common/ref_resolution/resolve_schema_ref.py +2 -2
  18. pyopenapi_gen/core/parsing/common/type_parser.py +2 -3
  19. pyopenapi_gen/core/parsing/context.py +10 -10
  20. pyopenapi_gen/core/parsing/cycle_helpers.py +5 -2
  21. pyopenapi_gen/core/parsing/keywords/all_of_parser.py +5 -5
  22. pyopenapi_gen/core/parsing/keywords/any_of_parser.py +4 -4
  23. pyopenapi_gen/core/parsing/keywords/array_items_parser.py +4 -4
  24. pyopenapi_gen/core/parsing/keywords/one_of_parser.py +4 -4
  25. pyopenapi_gen/core/parsing/keywords/properties_parser.py +5 -5
  26. pyopenapi_gen/core/parsing/schema_finalizer.py +15 -15
  27. pyopenapi_gen/core/parsing/schema_parser.py +44 -25
  28. pyopenapi_gen/core/parsing/transformers/inline_enum_extractor.py +4 -4
  29. pyopenapi_gen/core/parsing/transformers/inline_object_promoter.py +7 -4
  30. pyopenapi_gen/core/parsing/unified_cycle_detection.py +10 -10
  31. pyopenapi_gen/core/schemas.py +10 -10
  32. pyopenapi_gen/core/streaming_helpers.py +5 -7
  33. pyopenapi_gen/core/telemetry.py +4 -4
  34. pyopenapi_gen/core/utils.py +7 -7
  35. pyopenapi_gen/core/writers/code_writer.py +2 -2
  36. pyopenapi_gen/core/writers/documentation_writer.py +18 -18
  37. pyopenapi_gen/core/writers/line_writer.py +3 -3
  38. pyopenapi_gen/core/writers/python_construct_renderer.py +10 -10
  39. pyopenapi_gen/emit/models_emitter.py +2 -2
  40. pyopenapi_gen/emitters/core_emitter.py +3 -5
  41. pyopenapi_gen/emitters/endpoints_emitter.py +12 -12
  42. pyopenapi_gen/emitters/exceptions_emitter.py +4 -3
  43. pyopenapi_gen/emitters/models_emitter.py +6 -6
  44. pyopenapi_gen/generator/client_generator.py +6 -6
  45. pyopenapi_gen/helpers/endpoint_utils.py +16 -18
  46. pyopenapi_gen/helpers/type_cleaner.py +66 -53
  47. pyopenapi_gen/helpers/type_helper.py +7 -7
  48. pyopenapi_gen/helpers/type_resolution/array_resolver.py +4 -4
  49. pyopenapi_gen/helpers/type_resolution/composition_resolver.py +5 -5
  50. pyopenapi_gen/helpers/type_resolution/finalizer.py +38 -22
  51. pyopenapi_gen/helpers/type_resolution/named_resolver.py +4 -5
  52. pyopenapi_gen/helpers/type_resolution/object_resolver.py +11 -11
  53. pyopenapi_gen/helpers/type_resolution/primitive_resolver.py +1 -2
  54. pyopenapi_gen/helpers/type_resolution/resolver.py +2 -3
  55. pyopenapi_gen/ir.py +32 -34
  56. pyopenapi_gen/types/contracts/protocols.py +5 -5
  57. pyopenapi_gen/types/contracts/types.py +2 -3
  58. pyopenapi_gen/types/resolvers/reference_resolver.py +4 -4
  59. pyopenapi_gen/types/resolvers/response_resolver.py +6 -4
  60. pyopenapi_gen/types/resolvers/schema_resolver.py +32 -16
  61. pyopenapi_gen/types/services/type_service.py +55 -9
  62. pyopenapi_gen/types/strategies/response_strategy.py +6 -7
  63. pyopenapi_gen/visit/client_visitor.py +5 -7
  64. pyopenapi_gen/visit/endpoint/generators/docstring_generator.py +7 -7
  65. pyopenapi_gen/visit/endpoint/generators/request_generator.py +5 -5
  66. pyopenapi_gen/visit/endpoint/generators/response_handler_generator.py +38 -17
  67. pyopenapi_gen/visit/endpoint/generators/signature_generator.py +4 -4
  68. pyopenapi_gen/visit/endpoint/generators/url_args_generator.py +17 -17
  69. pyopenapi_gen/visit/endpoint/processors/import_analyzer.py +8 -8
  70. pyopenapi_gen/visit/endpoint/processors/parameter_processor.py +13 -13
  71. pyopenapi_gen/visit/model/alias_generator.py +1 -4
  72. pyopenapi_gen/visit/model/dataclass_generator.py +139 -10
  73. pyopenapi_gen/visit/model/model_visitor.py +2 -3
  74. pyopenapi_gen/visit/visitor.py +3 -3
  75. {pyopenapi_gen-0.14.0.dist-info → pyopenapi_gen-0.14.1.dist-info}/METADATA +1 -1
  76. pyopenapi_gen-0.14.1.dist-info/RECORD +132 -0
  77. pyopenapi_gen-0.14.0.dist-info/RECORD +0 -132
  78. {pyopenapi_gen-0.14.0.dist-info → pyopenapi_gen-0.14.1.dist-info}/WHEEL +0 -0
  79. {pyopenapi_gen-0.14.0.dist-info → pyopenapi_gen-0.14.1.dist-info}/entry_points.txt +0 -0
  80. {pyopenapi_gen-0.14.0.dist-info → pyopenapi_gen-0.14.1.dist-info}/licenses/LICENSE +0 -0
@@ -1,5 +1,8 @@
1
1
  import logging
2
- from typing import Optional
2
+ from typing import TYPE_CHECKING
3
+
4
+ if TYPE_CHECKING:
5
+ pass
3
6
 
4
7
  from pyopenapi_gen import IRSchema
5
8
  from pyopenapi_gen.core.utils import NameSanitizer
@@ -77,7 +80,7 @@ def _handle_cycle_detection(
77
80
  return schema
78
81
 
79
82
 
80
- def _handle_max_depth_exceeded(original_name: Optional[str], context: ParsingContext, max_depth: int) -> IRSchema:
83
+ def _handle_max_depth_exceeded(original_name: str | None, context: ParsingContext, max_depth: int) -> IRSchema:
81
84
  """Handle case where maximum recursion depth is exceeded.
82
85
 
83
86
  Contracts:
@@ -6,7 +6,7 @@ Renamed from all_of_merger to all_of_parser for consistency.
6
6
  from __future__ import annotations
7
7
 
8
8
  import os
9
- from typing import TYPE_CHECKING, Any, Callable, Dict, List, Mapping, Optional, Set, Tuple
9
+ from typing import TYPE_CHECKING, Any, Callable, List, Mapping, Set, Tuple
10
10
 
11
11
  from pyopenapi_gen import IRSchema
12
12
 
@@ -20,11 +20,11 @@ if TYPE_CHECKING:
20
20
 
21
21
  def _process_all_of(
22
22
  node: Mapping[str, Any],
23
- current_schema_name: Optional[str],
23
+ current_schema_name: str | None,
24
24
  context: ParsingContext,
25
- _parse_schema_func: Callable[[Optional[str], Optional[Mapping[str, Any]], ParsingContext, Optional[int]], IRSchema],
25
+ _parse_schema_func: Callable[[str | None, Mapping[str, Any] | None, ParsingContext, int | None], IRSchema],
26
26
  max_depth: int = ENV_MAX_DEPTH,
27
- ) -> Tuple[Dict[str, IRSchema], Set[str], List[IRSchema]]:
27
+ ) -> Tuple[dict[str, IRSchema], Set[str], List[IRSchema]]:
28
28
  """Processes the 'allOf' keyword in a schema node.
29
29
 
30
30
  Merges properties and required fields from all sub-schemas listed in 'allOf'
@@ -54,7 +54,7 @@ def _process_all_of(
54
54
 
55
55
  parsed_all_of_components: List[IRSchema] = []
56
56
  merged_required: Set[str] = set(node.get("required", []))
57
- merged_properties: Dict[str, IRSchema] = {}
57
+ merged_properties: dict[str, IRSchema] = {}
58
58
 
59
59
  if "allOf" not in node:
60
60
  current_node_direct_properties = node.get("properties", {})
@@ -4,7 +4,7 @@ Parser for 'anyOf' keyword in OpenAPI schemas.
4
4
 
5
5
  from __future__ import annotations
6
6
 
7
- from typing import TYPE_CHECKING, Any, Callable, List, Mapping, Optional
7
+ from typing import TYPE_CHECKING, Any, Callable, List, Mapping
8
8
 
9
9
  from pyopenapi_gen import IRSchema # Main IR model
10
10
 
@@ -21,9 +21,9 @@ def _parse_any_of_schemas(
21
21
  context: ParsingContext,
22
22
  max_depth: int,
23
23
  parse_fn: Callable[ # Accepts the main schema parsing function
24
- [Optional[str], Optional[Mapping[str, Any]], ParsingContext, int], IRSchema
24
+ [str | None, Mapping[str, Any] | None, ParsingContext, int], IRSchema
25
25
  ],
26
- ) -> tuple[Optional[List[IRSchema]], bool, Optional[str]]:
26
+ ) -> tuple[List[IRSchema] | None, bool, str | None]:
27
27
  """Parses 'anyOf' sub-schemas using a provided parsing function.
28
28
 
29
29
  Contracts:
@@ -51,7 +51,7 @@ def _parse_any_of_schemas(
51
51
 
52
52
  parsed_schemas_list: List[IRSchema] = [] # Renamed to avoid confusion with module name
53
53
  is_nullable_from_any_of = False
54
- effective_schema_type: Optional[str] = None
54
+ effective_schema_type: str | None = None
55
55
 
56
56
  for sub_node in any_of_nodes:
57
57
  if isinstance(sub_node, dict) and sub_node.get("type") == "null":
@@ -5,7 +5,7 @@ Renamed from array_parser.py for clarity.
5
5
 
6
6
  from __future__ import annotations
7
7
 
8
- from typing import TYPE_CHECKING, Any, Callable, Mapping, Optional
8
+ from typing import TYPE_CHECKING, Any, Callable, Mapping
9
9
 
10
10
  from pyopenapi_gen import IRSchema
11
11
 
@@ -18,14 +18,14 @@ if TYPE_CHECKING:
18
18
 
19
19
 
20
20
  def _parse_array_items_schema(
21
- parent_schema_name: Optional[str],
21
+ parent_schema_name: str | None,
22
22
  items_node_data: Mapping[str, Any],
23
23
  context: ParsingContext,
24
24
  parse_fn: Callable[ # Accepts the main schema parsing function
25
- [Optional[str], Optional[Mapping[str, Any]], ParsingContext, int], IRSchema
25
+ [str | None, Mapping[str, Any] | None, ParsingContext, int], IRSchema
26
26
  ],
27
27
  max_depth: int,
28
- ) -> Optional[IRSchema]:
28
+ ) -> IRSchema | None:
29
29
  """Parses the 'items' sub-schema of an array.
30
30
 
31
31
  Args:
@@ -4,7 +4,7 @@ Parser for 'oneOf' keyword in OpenAPI schemas.
4
4
 
5
5
  from __future__ import annotations
6
6
 
7
- from typing import TYPE_CHECKING, Any, Callable, List, Mapping, Optional
7
+ from typing import TYPE_CHECKING, Any, Callable, List, Mapping
8
8
 
9
9
  from pyopenapi_gen import IRSchema
10
10
 
@@ -19,8 +19,8 @@ def _parse_one_of_schemas(
19
19
  one_of_nodes: List[Mapping[str, Any]],
20
20
  context: ParsingContext,
21
21
  max_depth: int,
22
- parse_fn: Callable[[Optional[str], Optional[Mapping[str, Any]], ParsingContext, int], IRSchema],
23
- ) -> tuple[Optional[List[IRSchema]], bool, Optional[str]]:
22
+ parse_fn: Callable[[str | None, Mapping[str, Any] | None, ParsingContext, int], IRSchema],
23
+ ) -> tuple[List[IRSchema] | None, bool, str | None]:
24
24
  """Parses 'oneOf' sub-schemas using a provided parsing function.
25
25
 
26
26
  Contracts:
@@ -48,7 +48,7 @@ def _parse_one_of_schemas(
48
48
 
49
49
  parsed_schemas_list: List[IRSchema] = []
50
50
  is_nullable_from_one_of = False
51
- effective_schema_type: Optional[str] = None
51
+ effective_schema_type: str | None = None
52
52
 
53
53
  for sub_node in one_of_nodes:
54
54
  if isinstance(sub_node, dict) and sub_node.get("type") == "null":
@@ -1,7 +1,7 @@
1
1
  from __future__ import annotations
2
2
 
3
3
  import logging
4
- from typing import TYPE_CHECKING, Any, Callable, Dict, Mapping, Optional
4
+ from typing import TYPE_CHECKING, Any, Callable, Mapping
5
5
 
6
6
  # Import NameSanitizer for use in name generation
7
7
  from pyopenapi_gen.core.utils import NameSanitizer
@@ -27,12 +27,12 @@ if TYPE_CHECKING:
27
27
 
28
28
  def _parse_properties(
29
29
  properties_node: Mapping[str, Any],
30
- parent_schema_name: Optional[str],
30
+ parent_schema_name: str | None,
31
31
  context: ParsingContext,
32
32
  max_depth: int,
33
- parse_fn: Callable[[Optional[str], Optional[Mapping[str, Any]], ParsingContext, Optional[int]], IRSchema],
33
+ parse_fn: Callable[[str | None, Mapping[str, Any] | None, ParsingContext, int | None], IRSchema],
34
34
  logger: logging.Logger,
35
- ) -> Dict[str, IRSchema]:
35
+ ) -> dict[str, IRSchema]:
36
36
  """Parse properties from a schema node.
37
37
 
38
38
  Contracts:
@@ -45,7 +45,7 @@ def _parse_properties(
45
45
  - Returns a dictionary mapping property names to IRSchema instances
46
46
  - Property references are properly maintained
47
47
  """
48
- properties_map: Dict[str, IRSchema] = {}
48
+ properties_map: dict[str, IRSchema] = {}
49
49
 
50
50
  for prop_key, prop_schema_node in properties_node.items():
51
51
  # Skip invalid property names
@@ -4,7 +4,7 @@ Finalization an IRSchema object during OpenAPI parsing.
4
4
 
5
5
  from __future__ import annotations
6
6
 
7
- from typing import TYPE_CHECKING, Any, Callable, Dict, List, Mapping, Optional, Set, Union
7
+ from typing import TYPE_CHECKING, Any, Callable, List, Mapping, Set, Union
8
8
 
9
9
  from pyopenapi_gen import IRSchema
10
10
  from pyopenapi_gen.core.utils import NameSanitizer
@@ -21,24 +21,24 @@ if TYPE_CHECKING:
21
21
 
22
22
 
23
23
  def _finalize_schema_object(
24
- name: Optional[str],
24
+ name: str | None,
25
25
  node: Mapping[str, Any],
26
26
  context: ParsingContext,
27
- schema_type: Optional[str],
27
+ schema_type: str | None,
28
28
  is_nullable: bool,
29
- any_of_schemas: Optional[List[IRSchema]],
30
- one_of_schemas: Optional[List[IRSchema]],
31
- parsed_all_of_components: Optional[List[IRSchema]],
32
- final_properties_map: Dict[str, IRSchema],
29
+ any_of_schemas: List[IRSchema] | None,
30
+ one_of_schemas: List[IRSchema] | None,
31
+ parsed_all_of_components: List[IRSchema] | None,
32
+ final_properties_map: dict[str, IRSchema],
33
33
  merged_required_set: Set[str],
34
- final_items_schema: Optional[IRSchema],
35
- additional_properties_node: Optional[Union[bool, Mapping[str, Any]]],
36
- enum_node: Optional[List[Any]],
37
- format_node: Optional[str],
38
- description_node: Optional[str],
34
+ final_items_schema: IRSchema | None,
35
+ additional_properties_node: Union[bool, Mapping[str, Any]] | None,
36
+ enum_node: List[Any] | None,
37
+ format_node: str | None,
38
+ description_node: str | None,
39
39
  from_unresolved_ref_node: bool,
40
40
  max_depth: int,
41
- parse_fn: Callable[[Optional[str], Optional[Mapping[str, Any]], ParsingContext, int], IRSchema],
41
+ parse_fn: Callable[[str | None, Mapping[str, Any] | None, ParsingContext, int], IRSchema],
42
42
  logger: Any, # Changed to Any to support both real and mock loggers
43
43
  ) -> IRSchema:
44
44
  """Constructs the IRSchema object, performs final adjustments, and updates context.
@@ -71,10 +71,10 @@ def _finalize_schema_object(
71
71
  # Early return of cycle placeholder
72
72
  return existing_schema
73
73
 
74
- final_enum_values: Optional[List[Any]] = enum_node if isinstance(enum_node, list) else None
74
+ final_enum_values: List[Any] | None = enum_node if isinstance(enum_node, list) else None
75
75
  final_required_fields_list: List[str] = sorted(list(merged_required_set))
76
76
 
77
- final_additional_properties: Optional[Union[bool, IRSchema]] = None
77
+ final_additional_properties: Union[bool, IRSchema] | None = None
78
78
  if isinstance(additional_properties_node, bool):
79
79
  final_additional_properties = additional_properties_node
80
80
  elif isinstance(additional_properties_node, dict):
@@ -6,7 +6,7 @@ from __future__ import annotations
6
6
 
7
7
  import logging
8
8
  import os
9
- from typing import Any, Callable, Dict, List, Mapping, Optional, Set, Tuple
9
+ from typing import Any, Callable, List, Mapping, Set, Tuple
10
10
 
11
11
  from pyopenapi_gen import IRSchema
12
12
  from pyopenapi_gen.core.utils import NameSanitizer
@@ -32,9 +32,9 @@ except ValueError:
32
32
 
33
33
  def _resolve_ref(
34
34
  ref_path_str: str,
35
- parent_schema_name: Optional[str], # Name of the schema containing this $ref
35
+ parent_schema_name: str | None, # Name of the schema containing this $ref
36
36
  context: ParsingContext,
37
- max_depth_override: Optional[int], # Propagated from the main _parse_schema call
37
+ max_depth_override: int | None, # Propagated from the main _parse_schema call
38
38
  allow_self_reference_for_parent: bool,
39
39
  ) -> IRSchema:
40
40
  """Resolves a $ref string, handling cycles and depth for the referenced schema."""
@@ -82,13 +82,11 @@ def _resolve_ref(
82
82
 
83
83
  def _parse_composition_keywords(
84
84
  node: Mapping[str, Any],
85
- name: Optional[str],
85
+ name: str | None,
86
86
  context: ParsingContext,
87
87
  max_depth: int,
88
- parse_fn: Callable[[Optional[str], Optional[Mapping[str, Any]], ParsingContext, Optional[int]], IRSchema],
89
- ) -> Tuple[
90
- Optional[List[IRSchema]], Optional[List[IRSchema]], Optional[List[IRSchema]], Dict[str, IRSchema], Set[str], bool
91
- ]:
88
+ parse_fn: Callable[[str | None, Mapping[str, Any] | None, ParsingContext, int | None], IRSchema],
89
+ ) -> Tuple[List[IRSchema] | None, List[IRSchema] | None, List[IRSchema] | None, dict[str, IRSchema], Set[str], bool]:
92
90
  """Parse composition keywords (anyOf, oneOf, allOf) from a schema node.
93
91
 
94
92
  Contracts:
@@ -101,10 +99,10 @@ def _parse_composition_keywords(
101
99
  - Returns a tuple of (any_of_schemas, one_of_schemas, all_of_components,
102
100
  properties, required_fields, is_nullable)
103
101
  """
104
- any_of_schemas: Optional[List[IRSchema]] = None
105
- one_of_schemas: Optional[List[IRSchema]] = None
106
- parsed_all_of_components: Optional[List[IRSchema]] = None
107
- merged_properties: Dict[str, IRSchema] = {}
102
+ any_of_schemas: List[IRSchema] | None = None
103
+ one_of_schemas: List[IRSchema] | None = None
104
+ parsed_all_of_components: List[IRSchema] | None = None
105
+ merged_properties: dict[str, IRSchema] = {}
108
106
  merged_required_set: Set[str] = set()
109
107
  is_nullable: bool = False
110
108
 
@@ -130,14 +128,14 @@ def _parse_composition_keywords(
130
128
 
131
129
  def _parse_properties(
132
130
  properties_node: Mapping[str, Any],
133
- parent_schema_name: Optional[str],
134
- existing_properties: Dict[str, IRSchema], # Properties already merged, e.g., from allOf
131
+ parent_schema_name: str | None,
132
+ existing_properties: dict[str, IRSchema], # Properties already merged, e.g., from allOf
135
133
  context: ParsingContext,
136
- max_depth_override: Optional[int],
134
+ max_depth_override: int | None,
137
135
  allow_self_reference: bool,
138
- ) -> Dict[str, IRSchema]:
136
+ ) -> dict[str, IRSchema]:
139
137
  """Parses the 'properties' block of a schema node."""
140
- parsed_props: Dict[str, IRSchema] = existing_properties.copy()
138
+ parsed_props: dict[str, IRSchema] = existing_properties.copy()
141
139
 
142
140
  for prop_name, prop_schema_node in properties_node.items():
143
141
  if not isinstance(prop_name, str) or not prop_name:
@@ -316,10 +314,10 @@ def _parse_properties(
316
314
 
317
315
 
318
316
  def _parse_schema(
319
- schema_name: Optional[str],
320
- schema_node: Optional[Mapping[str, Any]],
317
+ schema_name: str | None,
318
+ schema_node: Mapping[str, Any] | None,
321
319
  context: ParsingContext,
322
- max_depth_override: Optional[int] = None,
320
+ max_depth_override: int | None = None,
323
321
  allow_self_reference: bool = False,
324
322
  ) -> IRSchema:
325
323
  """
@@ -376,6 +374,8 @@ def _parse_schema(
376
374
 
377
375
  try: # Ensure exit_schema is called
378
376
  if schema_node is None:
377
+ # Create empty schema for null schema nodes
378
+ # Do NOT set generation_name - null schemas should resolve to Any inline, not generate separate files
379
379
  return IRSchema(name=NameSanitizer.sanitize_class_name(schema_name) if schema_name else None)
380
380
 
381
381
  if not isinstance(schema_node, Mapping):
@@ -412,7 +412,7 @@ def _parse_schema(
412
412
 
413
413
  return resolved_schema
414
414
 
415
- extracted_type: Optional[str] = None
415
+ extracted_type: str | None = None
416
416
  is_nullable_from_type_field = False
417
417
  raw_type_field = schema_node.get("type")
418
418
 
@@ -455,8 +455,10 @@ def _parse_schema(
455
455
  )
456
456
  )
457
457
 
458
- is_nullable_overall = is_nullable_from_type_field or nullable_from_comp
459
- final_properties_for_ir: Dict[str, IRSchema] = {}
458
+ # Check for direct nullable field (OpenAPI 3.0 Swagger extension)
459
+ is_nullable_from_node = schema_node.get("nullable", False)
460
+ is_nullable_overall = is_nullable_from_type_field or nullable_from_comp or is_nullable_from_node
461
+ final_properties_for_ir: dict[str, IRSchema] = {}
460
462
  current_final_type = extracted_type
461
463
  if not current_final_type:
462
464
  if props_from_comp or "allOf" in schema_node or "properties" in schema_node:
@@ -512,7 +514,7 @@ def _parse_schema(
512
514
  if "required" in schema_node and isinstance(schema_node["required"], list):
513
515
  final_required_fields_set.update(schema_node["required"])
514
516
 
515
- items_ir: Optional[IRSchema] = None
517
+ items_ir: IRSchema | None = None
516
518
  if current_final_type == "array":
517
519
  items_node = schema_node.get("items")
518
520
  if items_node:
@@ -560,6 +562,22 @@ def _parse_schema(
560
562
 
561
563
  schema_ir_name_attr = NameSanitizer.sanitize_class_name(schema_name) if schema_name else None
562
564
 
565
+ # Parse additionalProperties field
566
+ additional_properties_value: bool | IRSchema | None = None
567
+ if "additionalProperties" in schema_node:
568
+ additional_props_node = schema_node["additionalProperties"]
569
+ if isinstance(additional_props_node, bool):
570
+ additional_properties_value = additional_props_node
571
+ elif isinstance(additional_props_node, dict):
572
+ # Parse the additionalProperties schema
573
+ additional_properties_value = _parse_schema(
574
+ None, # No name for additional properties schema
575
+ additional_props_node,
576
+ context,
577
+ max_depth_override,
578
+ allow_self_reference,
579
+ )
580
+
563
581
  schema_ir = IRSchema(
564
582
  name=schema_ir_name_attr,
565
583
  type=current_final_type,
@@ -575,11 +593,12 @@ def _parse_schema(
575
593
  example=schema_node.get("example"),
576
594
  is_nullable=is_nullable_overall,
577
595
  items=items_ir,
596
+ additional_properties=additional_properties_value,
578
597
  )
579
598
 
580
599
  if schema_ir.type == "array" and isinstance(schema_node.get("items"), Mapping):
581
600
  raw_items_node = schema_node["items"]
582
- item_schema_context_name_for_reparse: Optional[str]
601
+ item_schema_context_name_for_reparse: str | None
583
602
  base_name_for_reparse_item = schema_name or "AnonymousArray"
584
603
  item_schema_context_name_for_reparse = NameSanitizer.sanitize_class_name(
585
604
  f"{base_name_for_reparse_item}Item"
@@ -5,7 +5,7 @@ Handles the extraction of inline enums from schema definitions.
5
5
  from __future__ import annotations
6
6
 
7
7
  import logging
8
- from typing import Any, Mapping, Optional
8
+ from typing import Any, Mapping
9
9
 
10
10
  from .... import IRSchema
11
11
  from ...utils import NameSanitizer
@@ -13,12 +13,12 @@ from ..context import ParsingContext
13
13
 
14
14
 
15
15
  def _extract_enum_from_property_node(
16
- parent_schema_name: Optional[str],
16
+ parent_schema_name: str | None,
17
17
  property_key: str,
18
18
  property_node_data: Mapping[str, Any],
19
19
  context: ParsingContext,
20
20
  logger: logging.Logger,
21
- ) -> Optional[IRSchema]:
21
+ ) -> IRSchema | None:
22
22
  """
23
23
  Checks a property's schema node for an inline enum definition.
24
24
 
@@ -126,7 +126,7 @@ def _extract_enum_from_property_node(
126
126
 
127
127
 
128
128
  def _process_standalone_inline_enum(
129
- schema_name: Optional[str], # The original intended name for this schema
129
+ schema_name: str | None, # The original intended name for this schema
130
130
  node_data: Mapping[str, Any], # The raw node data for this schema
131
131
  schema_obj: IRSchema, # The IRSchema object already partially parsed for this node
132
132
  context: ParsingContext,
@@ -5,7 +5,10 @@ Handles the promotion of inline object schemas to global schemas.
5
5
  from __future__ import annotations
6
6
 
7
7
  import logging
8
- from typing import Optional
8
+ from typing import TYPE_CHECKING
9
+
10
+ if TYPE_CHECKING:
11
+ pass
9
12
 
10
13
  from .... import IRSchema
11
14
  from ...utils import NameSanitizer
@@ -13,12 +16,12 @@ from ..context import ParsingContext
13
16
 
14
17
 
15
18
  def _attempt_promote_inline_object(
16
- parent_schema_name: Optional[str], # Name of the schema containing the property
19
+ parent_schema_name: str | None, # Name of the schema containing the property
17
20
  property_key: str, # The key (name) of the property being processed
18
21
  property_schema_obj: IRSchema, # The IRSchema of the property itself (already parsed)
19
22
  context: ParsingContext,
20
23
  logger: logging.Logger,
21
- ) -> Optional[IRSchema]:
24
+ ) -> IRSchema | None:
22
25
  logger.debug(
23
26
  f"PROMO_ATTEMPT: parent='{parent_schema_name}', prop_key='{property_key}', "
24
27
  f"prop_schema_name='{property_schema_obj.name}', prop_schema_type='{property_schema_obj.type}', "
@@ -75,7 +78,7 @@ def _attempt_promote_inline_object(
75
78
  else:
76
79
  base_name_candidate = sanitized_prop_key_class_name
77
80
 
78
- chosen_global_name: Optional[str] = None
81
+ chosen_global_name: str | None = None
79
82
 
80
83
  # Check if the primary candidate name is available or already points to this object
81
84
  if base_name_candidate in context.parsed_schemas:
@@ -10,7 +10,7 @@ from __future__ import annotations
10
10
  import logging
11
11
  from dataclasses import dataclass, field
12
12
  from enum import Enum
13
- from typing import Dict, List, Optional, Set
13
+ from typing import List, Set
14
14
 
15
15
  from pyopenapi_gen import IRSchema
16
16
  from pyopenapi_gen.core.utils import NameSanitizer
@@ -62,10 +62,10 @@ class CycleDetectionResult:
62
62
  """Result of cycle detection check."""
63
63
 
64
64
  is_cycle: bool
65
- cycle_type: Optional[CycleType]
65
+ cycle_type: CycleType | None
66
66
  action: CycleAction
67
- cycle_info: Optional[CycleInfo] = None
68
- placeholder_schema: Optional[IRSchema] = None
67
+ cycle_info: CycleInfo | None = None
68
+ placeholder_schema: IRSchema | None = None
69
69
 
70
70
 
71
71
  @dataclass
@@ -74,8 +74,8 @@ class UnifiedCycleContext:
74
74
 
75
75
  # Core tracking
76
76
  schema_stack: List[str] = field(default_factory=list)
77
- schema_states: Dict[str, SchemaState] = field(default_factory=dict)
78
- parsed_schemas: Dict[str, IRSchema] = field(default_factory=dict)
77
+ schema_states: dict[str, SchemaState] = field(default_factory=dict)
78
+ parsed_schemas: dict[str, IRSchema] = field(default_factory=dict)
79
79
  recursion_depth: int = 0
80
80
 
81
81
  # Detection results
@@ -155,7 +155,7 @@ def create_depth_placeholder(schema_name: str, depth: int) -> IRSchema:
155
155
  )
156
156
 
157
157
 
158
- def unified_cycle_check(schema_name: Optional[str], context: UnifiedCycleContext) -> CycleDetectionResult:
158
+ def unified_cycle_check(schema_name: str | None, context: UnifiedCycleContext) -> CycleDetectionResult:
159
159
  """Unified cycle detection that handles all cases."""
160
160
 
161
161
  if schema_name is None:
@@ -262,7 +262,7 @@ def unified_cycle_check(schema_name: Optional[str], context: UnifiedCycleContext
262
262
  return CycleDetectionResult(False, None, CycleAction.CONTINUE_PARSING)
263
263
 
264
264
 
265
- def unified_enter_schema(schema_name: Optional[str], context: UnifiedCycleContext) -> CycleDetectionResult:
265
+ def unified_enter_schema(schema_name: str | None, context: UnifiedCycleContext) -> CycleDetectionResult:
266
266
  """Unified entry point that always maintains consistent state."""
267
267
  context.recursion_depth += 1
268
268
 
@@ -275,7 +275,7 @@ def unified_enter_schema(schema_name: Optional[str], context: UnifiedCycleContex
275
275
  return result
276
276
 
277
277
 
278
- def unified_exit_schema(schema_name: Optional[str], context: UnifiedCycleContext) -> None:
278
+ def unified_exit_schema(schema_name: str | None, context: UnifiedCycleContext) -> None:
279
279
  """Unified exit that always maintains consistent state."""
280
280
  if context.recursion_depth > 0:
281
281
  context.recursion_depth -= 1
@@ -288,6 +288,6 @@ def unified_exit_schema(schema_name: Optional[str], context: UnifiedCycleContext
288
288
  context.schema_states[schema_name] = SchemaState.COMPLETED
289
289
 
290
290
 
291
- def get_schema_or_placeholder(schema_name: str, context: UnifiedCycleContext) -> Optional[IRSchema]:
291
+ def get_schema_or_placeholder(schema_name: str, context: UnifiedCycleContext) -> IRSchema | None:
292
292
  """Get an existing schema or placeholder from the context."""
293
293
  return context.parsed_schemas.get(schema_name)
@@ -1,7 +1,7 @@
1
1
  from __future__ import annotations
2
2
 
3
3
  from dataclasses import MISSING, dataclass, fields
4
- from typing import Any, Dict, Type, TypeVar, Union, get_args, get_origin, get_type_hints
4
+ from typing import Any, Type, TypeVar, Union, get_args, get_origin, get_type_hints
5
5
 
6
6
  T = TypeVar("T", bound="BaseSchema")
7
7
 
@@ -10,7 +10,7 @@ def _extract_base_type(field_type: Any) -> Any:
10
10
  """Extract the base type from Optional/Union types."""
11
11
  origin = get_origin(field_type)
12
12
  if origin is Union:
13
- # For Optional[T] or Union[T, None], get the non-None type
13
+ # For T | None or Union[T, None], get the non-None type
14
14
  args = get_args(field_type)
15
15
  non_none_args = [arg for arg in args if arg is not type(None)]
16
16
  if len(non_none_args) == 1:
@@ -23,26 +23,26 @@ class BaseSchema:
23
23
  """Base class for all generated models, providing validation, dict conversion, and field mapping."""
24
24
 
25
25
  @classmethod
26
- def _get_field_mappings(cls) -> Dict[str, str]:
26
+ def _get_field_mappings(cls) -> dict[str, str]:
27
27
  """Get field mappings from Meta class if defined. Returns API field -> Python field mappings."""
28
28
  if hasattr(cls, "Meta") and hasattr(cls.Meta, "key_transform_with_load"):
29
29
  return cls.Meta.key_transform_with_load # type: ignore[no-any-return]
30
30
  return {}
31
31
 
32
32
  @classmethod
33
- def _get_reverse_field_mappings(cls) -> Dict[str, str]:
33
+ def _get_reverse_field_mappings(cls) -> dict[str, str]:
34
34
  """Get reverse field mappings. Returns Python field -> API field mappings."""
35
35
  mappings = cls._get_field_mappings()
36
36
  return {python_field: api_field for api_field, python_field in mappings.items()}
37
37
 
38
38
  @classmethod
39
- def from_dict(cls: Type[T], data: Dict[str, Any]) -> T:
39
+ def from_dict(cls: Type[T], data: dict[str, Any]) -> T:
40
40
  """Create an instance from a dictionary with automatic field name mapping."""
41
41
  if not isinstance(data, dict):
42
42
  raise TypeError(f"Input must be a dictionary, got {type(data).__name__}")
43
43
 
44
44
  field_mappings = cls._get_field_mappings() # API -> Python
45
- kwargs: Dict[str, Any] = {}
45
+ kwargs: dict[str, Any] = {}
46
46
  cls_fields = {f.name: f for f in fields(cls)}
47
47
 
48
48
  # Process each field in the data
@@ -64,7 +64,7 @@ class BaseSchema:
64
64
  # Fall back to raw annotation if get_type_hints fails
65
65
  pass
66
66
 
67
- # Extract base type (handles Optional[Type] -> Type)
67
+ # Extract base type (handles Type | None -> Type)
68
68
  base_type = _extract_base_type(field_type)
69
69
 
70
70
  if base_type is not None and hasattr(base_type, "from_dict") and isinstance(value, dict):
@@ -90,7 +90,7 @@ class BaseSchema:
90
90
 
91
91
  return cls(**kwargs)
92
92
 
93
- def to_dict(self, exclude_none: bool = False) -> Dict[str, Any]:
93
+ def to_dict(self, exclude_none: bool = False) -> dict[str, Any]:
94
94
  """Convert the model instance to a dictionary with reverse field name mapping."""
95
95
  reverse_mappings = self._get_reverse_field_mappings() # Python -> API
96
96
  result = {}
@@ -116,10 +116,10 @@ class BaseSchema:
116
116
 
117
117
  # Legacy aliases for backward compatibility
118
118
  @classmethod
119
- def model_validate(cls: Type[T], data: Dict[str, Any]) -> T:
119
+ def model_validate(cls: Type[T], data: dict[str, Any]) -> T:
120
120
  """Legacy alias for from_dict."""
121
121
  return cls.from_dict(data)
122
122
 
123
- def model_dump(self, exclude_none: bool = False) -> Dict[str, Any]:
123
+ def model_dump(self, exclude_none: bool = False) -> dict[str, Any]:
124
124
  """Legacy alias for to_dict."""
125
125
  return self.to_dict(exclude_none=exclude_none)
@@ -1,17 +1,15 @@
1
1
  import json
2
- from typing import Any, AsyncIterator, List, Optional
2
+ from typing import Any, AsyncIterator, List
3
3
 
4
4
  import httpx
5
5
 
6
6
 
7
7
  class SSEEvent:
8
- def __init__(
9
- self, data: str, event: Optional[str] = None, id: Optional[str] = None, retry: Optional[int] = None
10
- ) -> None:
8
+ def __init__(self, data: str, event: str | None = None, id: str | None = None, retry: int | None = None) -> None:
11
9
  self.data: str = data
12
- self.event: Optional[str] = event
13
- self.id: Optional[str] = id
14
- self.retry: Optional[int] = retry
10
+ self.event: str | None = event
11
+ self.id: str | None = id
12
+ self.retry: int | None = retry
15
13
 
16
14
  def __repr__(self) -> str:
17
15
  return f"SSEEvent(data={self.data!r}, event={self.event!r}, id={self.id!r}, retry={self.retry!r})"