pyopenapi-gen 0.14.0__py3-none-any.whl → 0.14.2__py3-none-any.whl

This diff shows the changes between two publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the packages as they appear in their respective public registries.
Files changed (80)
  1. pyopenapi_gen/cli.py +3 -3
  2. pyopenapi_gen/context/import_collector.py +10 -10
  3. pyopenapi_gen/context/render_context.py +13 -13
  4. pyopenapi_gen/core/auth/plugins.py +7 -7
  5. pyopenapi_gen/core/http_status_codes.py +2 -4
  6. pyopenapi_gen/core/http_transport.py +19 -19
  7. pyopenapi_gen/core/loader/operations/parser.py +2 -2
  8. pyopenapi_gen/core/loader/operations/request_body.py +3 -3
  9. pyopenapi_gen/core/loader/parameters/parser.py +3 -3
  10. pyopenapi_gen/core/loader/responses/parser.py +2 -2
  11. pyopenapi_gen/core/loader/schemas/extractor.py +4 -4
  12. pyopenapi_gen/core/pagination.py +3 -3
  13. pyopenapi_gen/core/parsing/common/ref_resolution/helpers/list_response.py +3 -3
  14. pyopenapi_gen/core/parsing/common/ref_resolution/helpers/missing_ref.py +2 -2
  15. pyopenapi_gen/core/parsing/common/ref_resolution/helpers/new_schema.py +3 -3
  16. pyopenapi_gen/core/parsing/common/ref_resolution/helpers/stripped_suffix.py +3 -3
  17. pyopenapi_gen/core/parsing/common/ref_resolution/resolve_schema_ref.py +2 -2
  18. pyopenapi_gen/core/parsing/common/type_parser.py +2 -3
  19. pyopenapi_gen/core/parsing/context.py +10 -10
  20. pyopenapi_gen/core/parsing/cycle_helpers.py +5 -2
  21. pyopenapi_gen/core/parsing/keywords/all_of_parser.py +5 -5
  22. pyopenapi_gen/core/parsing/keywords/any_of_parser.py +4 -4
  23. pyopenapi_gen/core/parsing/keywords/array_items_parser.py +4 -4
  24. pyopenapi_gen/core/parsing/keywords/one_of_parser.py +4 -4
  25. pyopenapi_gen/core/parsing/keywords/properties_parser.py +5 -5
  26. pyopenapi_gen/core/parsing/schema_finalizer.py +15 -15
  27. pyopenapi_gen/core/parsing/schema_parser.py +44 -25
  28. pyopenapi_gen/core/parsing/transformers/inline_enum_extractor.py +4 -4
  29. pyopenapi_gen/core/parsing/transformers/inline_object_promoter.py +7 -4
  30. pyopenapi_gen/core/parsing/unified_cycle_detection.py +10 -10
  31. pyopenapi_gen/core/schemas.py +10 -10
  32. pyopenapi_gen/core/streaming_helpers.py +5 -7
  33. pyopenapi_gen/core/telemetry.py +4 -4
  34. pyopenapi_gen/core/utils.py +7 -7
  35. pyopenapi_gen/core/writers/code_writer.py +2 -2
  36. pyopenapi_gen/core/writers/documentation_writer.py +18 -18
  37. pyopenapi_gen/core/writers/line_writer.py +3 -3
  38. pyopenapi_gen/core/writers/python_construct_renderer.py +10 -10
  39. pyopenapi_gen/emit/models_emitter.py +2 -2
  40. pyopenapi_gen/emitters/core_emitter.py +3 -5
  41. pyopenapi_gen/emitters/endpoints_emitter.py +24 -16
  42. pyopenapi_gen/emitters/exceptions_emitter.py +4 -3
  43. pyopenapi_gen/emitters/models_emitter.py +6 -6
  44. pyopenapi_gen/generator/client_generator.py +6 -6
  45. pyopenapi_gen/helpers/endpoint_utils.py +16 -18
  46. pyopenapi_gen/helpers/type_cleaner.py +66 -53
  47. pyopenapi_gen/helpers/type_helper.py +7 -7
  48. pyopenapi_gen/helpers/type_resolution/array_resolver.py +4 -4
  49. pyopenapi_gen/helpers/type_resolution/composition_resolver.py +5 -5
  50. pyopenapi_gen/helpers/type_resolution/finalizer.py +38 -22
  51. pyopenapi_gen/helpers/type_resolution/named_resolver.py +4 -5
  52. pyopenapi_gen/helpers/type_resolution/object_resolver.py +11 -11
  53. pyopenapi_gen/helpers/type_resolution/primitive_resolver.py +1 -2
  54. pyopenapi_gen/helpers/type_resolution/resolver.py +2 -3
  55. pyopenapi_gen/ir.py +32 -34
  56. pyopenapi_gen/types/contracts/protocols.py +5 -5
  57. pyopenapi_gen/types/contracts/types.py +2 -3
  58. pyopenapi_gen/types/resolvers/reference_resolver.py +4 -4
  59. pyopenapi_gen/types/resolvers/response_resolver.py +6 -4
  60. pyopenapi_gen/types/resolvers/schema_resolver.py +32 -16
  61. pyopenapi_gen/types/services/type_service.py +55 -9
  62. pyopenapi_gen/types/strategies/response_strategy.py +6 -7
  63. pyopenapi_gen/visit/client_visitor.py +5 -7
  64. pyopenapi_gen/visit/endpoint/generators/docstring_generator.py +7 -7
  65. pyopenapi_gen/visit/endpoint/generators/request_generator.py +5 -5
  66. pyopenapi_gen/visit/endpoint/generators/response_handler_generator.py +38 -17
  67. pyopenapi_gen/visit/endpoint/generators/signature_generator.py +4 -4
  68. pyopenapi_gen/visit/endpoint/generators/url_args_generator.py +17 -17
  69. pyopenapi_gen/visit/endpoint/processors/import_analyzer.py +8 -8
  70. pyopenapi_gen/visit/endpoint/processors/parameter_processor.py +13 -13
  71. pyopenapi_gen/visit/model/alias_generator.py +1 -4
  72. pyopenapi_gen/visit/model/dataclass_generator.py +139 -10
  73. pyopenapi_gen/visit/model/model_visitor.py +2 -3
  74. pyopenapi_gen/visit/visitor.py +3 -3
  75. {pyopenapi_gen-0.14.0.dist-info → pyopenapi_gen-0.14.2.dist-info}/METADATA +1 -1
  76. pyopenapi_gen-0.14.2.dist-info/RECORD +132 -0
  77. pyopenapi_gen-0.14.0.dist-info/RECORD +0 -132
  78. {pyopenapi_gen-0.14.0.dist-info → pyopenapi_gen-0.14.2.dist-info}/WHEEL +0 -0
  79. {pyopenapi_gen-0.14.0.dist-info → pyopenapi_gen-0.14.2.dist-info}/entry_points.txt +0 -0
  80. {pyopenapi_gen-0.14.0.dist-info → pyopenapi_gen-0.14.2.dist-info}/licenses/LICENSE +0 -0
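Nearly all of the changes below are a mechanical modernization of type hints: `typing.Dict`, `typing.Optional`, and related aliases are replaced with builtin generics (`dict[...]`, available since Python 3.9) and PEP 604 unions (`X | None`, Python 3.10+ or earlier with `from __future__ import annotations`). A minimal sketch of the pattern; the variable names are illustrative, not taken from the package:

```python
from __future__ import annotations

from typing import Any

# 0.14.0 style (typing aliases):
#   headers: Optional[Dict[str, str]] = None
# 0.14.2 style (builtin generics + PEP 604 unions):
headers: dict[str, str] | None = None
request_args: dict[str, Any] = {"headers": headers or {}}
```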
pyopenapi_gen/cli.py CHANGED
@@ -1,5 +1,5 @@
  from pathlib import Path
- from typing import Any, Dict, Optional, Union
+ from typing import Any, Union

  import typer
  import yaml
@@ -7,7 +7,7 @@ import yaml
  from .generator.client_generator import ClientGenerator, GenerationError


- def _load_spec(path_or_url: str) -> Union[Dict[str, Any], Any]:
+ def _load_spec(path_or_url: str) -> Union[dict[str, Any], Any]:
  """Load a spec from a file path or URL."""
  if Path(path_or_url).exists():
  return yaml.safe_load(Path(path_or_url).read_text())
@@ -30,7 +30,7 @@ def main(
  ),
  force: bool = typer.Option(False, "-f", "--force", help="Overwrite without diff check"),
  no_postprocess: bool = typer.Option(False, "--no-postprocess", help="Skip post-processing (type checking, etc.)"),
- core_package: Optional[str] = typer.Option(
+ core_package: str | None = typer.Option(
  None,
  "--core-package",
  help=(
pyopenapi_gen/context/import_collector.py CHANGED
@@ -9,7 +9,7 @@ direct, relative, and plain imports, with methods to add and query import statem
  import logging
  import sys
  from collections import defaultdict
- from typing import Dict, List, Optional, Set
+ from typing import List, Set

  # Initialize module logger
  logger = logging.getLogger(__name__)
@@ -143,18 +143,18 @@ class ImportCollector:
  def __init__(self) -> None:
  """Initialize a new ImportCollector with empty collections for all import types."""
  # Standard imports (from x import y)
- self.imports: Dict[str, Set[str]] = {}
+ self.imports: dict[str, Set[str]] = {}
  # Direct imports like 'from datetime import date'
- # self.direct_imports: Dict[str, Set[str]] = {} # Removed
+ # self.direct_imports: dict[str, Set[str]] = {} # Removed
  # Relative imports like 'from .models import Pet'
  self.relative_imports: defaultdict[str, set[str]] = defaultdict(set)
  # Plain imports like 'import json'
  self.plain_imports: set[str] = set()

  # Path information for the current file, used by get_formatted_imports
- self._current_file_module_dot_path: Optional[str] = None
- self._current_file_package_root: Optional[str] = None
- self._current_file_core_pkg_name_for_abs: Optional[str] = None
+ self._current_file_module_dot_path: str | None = None
+ self._current_file_package_root: str | None = None
+ self._current_file_core_pkg_name_for_abs: str | None = None

  def reset(self) -> None:
  """Reset the collector to its initial empty state."""
@@ -167,9 +167,9 @@ class ImportCollector:

  def set_current_file_context_for_rendering(
  self,
- current_module_dot_path: Optional[str],
- package_root: Optional[str],
- core_package_name_for_absolute_treatment: Optional[str],
+ current_module_dot_path: str | None,
+ package_root: str | None,
+ core_package_name_for_absolute_treatment: str | None,
  ) -> None:
  """Set the context for the current file, used by get_formatted_imports."""
  self._current_file_module_dot_path = current_module_dot_path
@@ -234,7 +234,7 @@ class ImportCollector:
  """
  self.plain_imports.add(module)

- def has_import(self, module: str, name: Optional[str] = None) -> bool:
+ def has_import(self, module: str, name: str | None = None) -> bool:
  """Check if a specific module or name within a module is already imported."""
  if name:
  # Check absolute/standard imports
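The `ImportCollector` API is unchanged here; only its annotations are modernized. A hedged usage sketch based on the signatures visible above — the module paths and package names are placeholders, and the direct `plain_imports.add(...)` call is only for illustration:

```python
from pyopenapi_gen.context.import_collector import ImportCollector

collector = ImportCollector()
collector.set_current_file_context_for_rendering(
    current_module_dot_path="my_client.models.pet",            # hypothetical generated module
    package_root="my_client",                                   # hypothetical package root
    core_package_name_for_absolute_treatment="my_client.core",  # hypothetical core package
)
collector.plain_imports.add("json")   # the set backing plain 'import json' statements
print(collector.has_import("json"))   # membership check; exact lookup rules live in has_import()
```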
pyopenapi_gen/context/render_context.py CHANGED
@@ -12,7 +12,7 @@ import os
  import re
  import sys
  from pathlib import Path
- from typing import Dict, Optional, Set
+ from typing import Set

  from pyopenapi_gen import IRSchema
  from pyopenapi_gen.core.utils import NameSanitizer
@@ -50,13 +50,13 @@ class RenderContext:

  def __init__(
  self,
- file_manager: Optional[FileManager] = None,
+ file_manager: FileManager | None = None,
  core_package_name: str = "core",
- package_root_for_generated_code: Optional[str] = None,
- overall_project_root: Optional[str] = None,
- parsed_schemas: Optional[Dict[str, IRSchema]] = None,
+ package_root_for_generated_code: str | None = None,
+ overall_project_root: str | None = None,
+ parsed_schemas: dict[str, IRSchema] | None = None,
  use_absolute_imports: bool = True,
- output_package_name: Optional[str] = None,
+ output_package_name: str | None = None,
  ) -> None:
  """
  Initialize a new RenderContext.
@@ -77,15 +77,15 @@ class RenderContext:
  self.file_manager = file_manager or FileManager()
  self.import_collector = ImportCollector()
  self.generated_modules: Set[str] = set()
- self.current_file: Optional[str] = None
+ self.current_file: str | None = None
  self.core_package_name: str = core_package_name
- self.package_root_for_generated_code: Optional[str] = package_root_for_generated_code
- self.overall_project_root: Optional[str] = overall_project_root or os.getcwd()
- self.parsed_schemas: Optional[Dict[str, IRSchema]] = parsed_schemas
+ self.package_root_for_generated_code: str | None = package_root_for_generated_code
+ self.overall_project_root: str | None = overall_project_root or os.getcwd()
+ self.parsed_schemas: dict[str, IRSchema] | None = parsed_schemas
  self.use_absolute_imports: bool = use_absolute_imports
- self.output_package_name: Optional[str] = output_package_name
+ self.output_package_name: str | None = output_package_name
  # Dictionary to store conditional imports, keyed by condition
- self.conditional_imports: Dict[str, Dict[str, Set[str]]] = {}
+ self.conditional_imports: dict[str, dict[str, Set[str]]] = {}

  def set_current_file(self, abs_path: str) -> None:
  """
@@ -111,7 +111,7 @@ class RenderContext:
  core_package_name_for_absolute_treatment=self.core_package_name,
  )

- def add_import(self, logical_module: str, name: Optional[str] = None, is_typing_import: bool = False) -> None:
+ def add_import(self, logical_module: str, name: str | None = None, is_typing_import: bool = False) -> None:
  """
  Add an import to the collector.

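A hedged construction sketch for `RenderContext`, using only the parameters and methods shown in the hunks above; every path and package name is a placeholder:

```python
from pyopenapi_gen.context.render_context import RenderContext

context = RenderContext(
    core_package_name="my_client.core",                      # hypothetical core package
    package_root_for_generated_code="/tmp/out/my_client",    # hypothetical output location
    overall_project_root="/tmp/out",
    output_package_name="my_client",
)
context.set_current_file("/tmp/out/my_client/models/pet.py")  # establishes the import-rendering context
context.add_import("dataclasses", "dataclass")                 # delegates to the underlying ImportCollector
```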
@@ -1,4 +1,4 @@
1
- from typing import Any, Awaitable, Callable, Dict, Optional
1
+ from typing import Any, Awaitable, Callable
2
2
 
3
3
  from .base import BaseAuth
4
4
 
@@ -9,7 +9,7 @@ class BearerAuth(BaseAuth):
9
9
  def __init__(self, token: str) -> None:
10
10
  self.token = token
11
11
 
12
- async def authenticate_request(self, request_args: Dict[str, Any]) -> Dict[str, Any]:
12
+ async def authenticate_request(self, request_args: dict[str, Any]) -> dict[str, Any]:
13
13
  # Ensure headers dict exists
14
14
  headers = dict(request_args.get("headers", {}))
15
15
  headers["Authorization"] = f"Bearer {self.token}"
@@ -20,10 +20,10 @@ class BearerAuth(BaseAuth):
20
20
  class HeadersAuth(BaseAuth):
21
21
  """Authentication plugin for arbitrary headers."""
22
22
 
23
- def __init__(self, headers: Dict[str, str]) -> None:
23
+ def __init__(self, headers: dict[str, str]) -> None:
24
24
  self.headers = headers
25
25
 
26
- async def authenticate_request(self, request_args: Dict[str, Any]) -> Dict[str, Any]:
26
+ async def authenticate_request(self, request_args: dict[str, Any]) -> dict[str, Any]:
27
27
  # Merge custom headers
28
28
  hdrs = dict(request_args.get("headers", {}))
29
29
  hdrs.update(self.headers)
@@ -45,7 +45,7 @@ class ApiKeyAuth(BaseAuth):
45
45
  self.location = location
46
46
  self.name = name
47
47
 
48
- async def authenticate_request(self, request_args: Dict[str, Any]) -> Dict[str, Any]:
48
+ async def authenticate_request(self, request_args: dict[str, Any]) -> dict[str, Any]:
49
49
  if self.location == "header":
50
50
  headers = dict(request_args.get("headers", {}))
51
51
  headers[self.name] = self.key
@@ -66,7 +66,7 @@ class ApiKeyAuth(BaseAuth):
66
66
  class OAuth2Auth(BaseAuth):
67
67
  """Authentication plugin for OAuth2 Bearer tokens, with optional auto-refresh."""
68
68
 
69
- def __init__(self, access_token: str, refresh_callback: Optional[Callable[[str], Awaitable[str]]] = None) -> None:
69
+ def __init__(self, access_token: str, refresh_callback: Callable[[str], Awaitable[str]] | None = None) -> None:
70
70
  """
71
71
  Args:
72
72
  access_token: The OAuth2 access token.
@@ -76,7 +76,7 @@ class OAuth2Auth(BaseAuth):
76
76
  self.access_token = access_token
77
77
  self.refresh_callback = refresh_callback
78
78
 
79
- async def authenticate_request(self, request_args: Dict[str, Any]) -> Dict[str, Any]:
79
+ async def authenticate_request(self, request_args: dict[str, Any]) -> dict[str, Any]:
80
80
  # In a real implementation, check expiry and refresh if needed
81
81
  if self.refresh_callback is not None:
82
82
  # Optionally refresh token (user must implement expiry logic)
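The auth plugins keep the same request-mutation contract, `authenticate_request(dict) -> dict`; only the annotations change. A small sketch of chaining two of the plugins shown above, assuming (as the `BearerAuth` body suggests) that the returned mapping carries the updated headers; the token and header values are placeholders:

```python
import asyncio
from typing import Any

from pyopenapi_gen.core.auth.plugins import BearerAuth, HeadersAuth


async def main() -> None:
    bearer = BearerAuth("token-123")             # placeholder token
    extra = HeadersAuth({"X-Tenant": "acme"})    # placeholder header

    request_args: dict[str, Any] = {"headers": {"Accept": "application/json"}}
    request_args = await bearer.authenticate_request(request_args)
    request_args = await extra.authenticate_request(request_args)
    print(request_args["headers"])               # Authorization and X-Tenant should both be present


asyncio.run(main())
```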
pyopenapi_gen/core/http_status_codes.py CHANGED
@@ -8,10 +8,8 @@ References:
  - IANA HTTP Status Code Registry: https://www.iana.org/assignments/http-status-codes/
  """

- from typing import Dict
-
  # Standard HTTP status codes with human-readable names
- HTTP_STATUS_CODES: Dict[int, str] = {
+ HTTP_STATUS_CODES: dict[int, str] = {
  # 1xx Informational
  100: "Continue",
  101: "Switching Protocols",
@@ -144,7 +142,7 @@ def is_success_code(code: int) -> bool:

  # Mapping of HTTP status codes to Python exception class names
  # These are semantically meaningful names that Python developers expect
- HTTP_EXCEPTION_NAMES: Dict[int, str] = {
+ HTTP_EXCEPTION_NAMES: dict[int, str] = {
  # 4xx Client Errors
  400: "BadRequestError",
  401: "UnauthorisedError",
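Only the two module-level mappings change their annotations. For reference, a hedged lookup example; the `is_success_code(200)` result assumes 2xx codes count as success, which the function name implies but whose body is not shown here:

```python
from pyopenapi_gen.core.http_status_codes import (
    HTTP_EXCEPTION_NAMES,
    HTTP_STATUS_CODES,
    is_success_code,
)

print(HTTP_STATUS_CODES[100])       # "Continue"
print(HTTP_EXCEPTION_NAMES[400])    # "BadRequestError"
print(is_success_code(200))         # expected True, assuming 2xx is treated as success
```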
pyopenapi_gen/core/http_transport.py CHANGED
@@ -1,4 +1,4 @@
- from typing import Any, Dict, Optional, Protocol
+ from typing import Any, Protocol

  import httpx

@@ -83,47 +83,47 @@ class HttpxTransport:

  Attributes:
  _client (httpx.AsyncClient): Configured HTTPX async client for all requests.
- _auth (Optional[BaseAuth]): Optional authentication plugin for request signing (can be CompositeAuth).
- _bearer_token (Optional[str]): Optional bearer token for Authorization header.
- _default_headers (Optional[Dict[str, str]]): Default headers to apply to all requests.
+ _auth (BaseAuth | None): Optional authentication plugin for request signing (can be CompositeAuth).
+ _bearer_token (str | None): Optional bearer token for Authorization header.
+ _default_headers (dict[str, str] | None): Default headers to apply to all requests.
  """

  def __init__(
  self,
  base_url: str,
- timeout: Optional[float] = None,
- auth: Optional[BaseAuth] = None,
- bearer_token: Optional[str] = None,
- default_headers: Optional[Dict[str, str]] = None,
+ timeout: float | None = None,
+ auth: BaseAuth | None = None,
+ bearer_token: str | None = None,
+ default_headers: dict[str, str] | None = None,
  ) -> None:
  """
  Initializes the HttpxTransport.

  Args:
  base_url (str): The base URL for all API requests made through this transport.
- timeout (Optional[float]): The default timeout in seconds for requests. If None, httpx's default is used.
- auth (Optional[BaseAuth]): Optional authentication plugin for request signing (can be CompositeAuth).
- bearer_token (Optional[str]): Optional raw bearer token string for Authorization header.
- default_headers (Optional[Dict[str, str]]): Default headers to apply to all requests.
+ timeout (float | None): The default timeout in seconds for requests. If None, httpx's default is used.
+ auth (BaseAuth | None): Optional authentication plugin for request signing (can be CompositeAuth).
+ bearer_token (str | None): Optional raw bearer token string for Authorization header.
+ default_headers (dict[str, str] | None): Default headers to apply to all requests.

  Note:
  If both auth and bearer_token are provided, auth takes precedence.
  """
  self._client: httpx.AsyncClient = httpx.AsyncClient(base_url=base_url, timeout=timeout)
- self._auth: Optional[BaseAuth] = auth
- self._bearer_token: Optional[str] = bearer_token
- self._default_headers: Optional[Dict[str, str]] = default_headers
+ self._auth: BaseAuth | None = auth
+ self._bearer_token: str | None = bearer_token
+ self._default_headers: dict[str, str] | None = default_headers

  async def _prepare_headers(
  self,
- current_request_kwargs: Dict[str, Any],
- ) -> Dict[str, str]:
+ current_request_kwargs: dict[str, Any],
+ ) -> dict[str, str]:
  """
  Prepares headers for an HTTP request, incorporating default headers,
  request-specific headers, and authentication.
  """
  # Initialize headers for the current request
- prepared_headers: Dict[str, str] = {}
+ prepared_headers: dict[str, str] = {}

  # 1. Apply transport-level default headers
  if self._default_headers:
@@ -180,7 +180,7 @@ class HttpxTransport:
  HTTPError: For non-2xx HTTP responses.
  """
  # Prepare request arguments, excluding headers initially
- request_args: Dict[str, Any] = {k: v for k, v in kwargs.items() if k != "headers"}
+ request_args: dict[str, Any] = {k: v for k, v in kwargs.items() if k != "headers"}

  # This method handles default headers, request-specific headers, and authentication
  prepared_headers = await self._prepare_headers(kwargs)
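The constructor signature shown above drives how a transport is configured. A minimal construction sketch; the URL, token, and header values are placeholders:

```python
from pyopenapi_gen.core.http_transport import HttpxTransport

transport = HttpxTransport(
    base_url="https://api.example.com",             # placeholder base URL
    timeout=10.0,                                    # float | None
    bearer_token="token-123",                        # ignored if auth= is also supplied (auth takes precedence)
    default_headers={"X-Client": "pyopenapi-gen"},   # applied before per-request headers and auth
)
```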
pyopenapi_gen/core/loader/operations/parser.py CHANGED
@@ -7,7 +7,7 @@ from __future__ import annotations

  import logging
  import warnings
- from typing import Any, List, Mapping, Optional, cast
+ from typing import Any, List, Mapping, cast

  from pyopenapi_gen import HTTPMethod, IROperation, IRParameter, IRRequestBody, IRResponse
  from pyopenapi_gen.core.loader.operations.post_processor import post_process_operation
@@ -96,7 +96,7 @@ def parse_operations(
  params.append(parse_parameter(resolved_p_param_node, context, operation_id_for_promo=operation_id))

  # Parse request body
- rb: Optional[IRRequestBody] = None
+ rb: IRRequestBody | None = None
  if "requestBody" in node_op:
  rb = parse_request_body(
  cast(Mapping[str, Any], node_op["requestBody"]),
pyopenapi_gen/core/loader/operations/request_body.py CHANGED
@@ -6,7 +6,7 @@ Provides functions to parse and transform OpenAPI request bodies into IR format.
  from __future__ import annotations

  import logging
- from typing import Any, Dict, Mapping, Optional
+ from typing import Any, Mapping

  from pyopenapi_gen import IRRequestBody, IRSchema
  from pyopenapi_gen.core.parsing.context import ParsingContext
@@ -20,7 +20,7 @@ def parse_request_body(
  raw_request_bodies: Mapping[str, Any],
  context: ParsingContext,
  operation_id: str,
- ) -> Optional[IRRequestBody]:
+ ) -> IRRequestBody | None:
  """Parse a request body node into an IRRequestBody.

  Contracts:
@@ -55,7 +55,7 @@ def parse_request_body(

  required_flag = bool(resolved_rb_node.get("required", False))
  desc = resolved_rb_node.get("description")
- content_map: Dict[str, IRSchema] = {}
+ content_map: dict[str, IRSchema] = {}

  parent_promo_name_for_req_body = f"{operation_id}RequestBody"

pyopenapi_gen/core/loader/parameters/parser.py CHANGED
@@ -6,7 +6,7 @@ Provides functions to parse and transform OpenAPI parameters into IR format.
  from __future__ import annotations

  import logging
- from typing import Any, Mapping, Optional, cast
+ from typing import Any, Mapping, cast

  from pyopenapi_gen import IRParameter, IRSchema
  from pyopenapi_gen.core.parsing.context import ParsingContext
@@ -51,7 +51,7 @@ def resolve_parameter_node_if_ref(param_node_data: Mapping[str, Any], context: P
  def parse_parameter(
  node: Mapping[str, Any],
  context: ParsingContext,
- operation_id_for_promo: Optional[str] = None,
+ operation_id_for_promo: str | None = None,
  ) -> IRParameter:
  """Convert an OpenAPI parameter node into IRParameter.

@@ -74,7 +74,7 @@ def parse_parameter(
  sch = node.get("schema")
  param_name = node["name"]

- name_for_inline_param_schema: Optional[str] = None
+ name_for_inline_param_schema: str | None = None
  if (
  sch
  and isinstance(sch, Mapping)
pyopenapi_gen/core/loader/responses/parser.py CHANGED
@@ -6,7 +6,7 @@ Provides functions to parse and transform OpenAPI responses into IR format.
  from __future__ import annotations

  import logging
- from typing import Any, Dict, Mapping
+ from typing import Any, Mapping

  from pyopenapi_gen import IRResponse, IRSchema
  from pyopenapi_gen.core.parsing.context import ParsingContext
@@ -43,7 +43,7 @@ def parse_response(
  if not operation_id_for_promo:
  raise ValueError("operation_id_for_promo must be provided")

- content: Dict[str, IRSchema] = {}
+ content: dict[str, IRSchema] = {}
  STREAM_FORMATS = {
  "application/octet-stream": "octet-stream",
  "text/event-stream": "event-stream",
pyopenapi_gen/core/loader/schemas/extractor.py CHANGED
@@ -7,7 +7,7 @@ from __future__ import annotations

  import copy
  import logging
- from typing import Any, Dict, Mapping
+ from typing import Any, Mapping

  from pyopenapi_gen import IRSchema
  from pyopenapi_gen.core.parsing.context import ParsingContext
@@ -17,7 +17,7 @@ from pyopenapi_gen.core.utils import NameSanitizer
  logger = logging.getLogger(__name__)


- def build_schemas(raw_schemas: Dict[str, Mapping[str, Any]], raw_components: Mapping[str, Any]) -> ParsingContext:
+ def build_schemas(raw_schemas: dict[str, Mapping[str, Any]], raw_components: Mapping[str, Any]) -> ParsingContext:
  """Build all named schemas up front, populating a ParsingContext.

  Contracts:
@@ -47,7 +47,7 @@ def build_schemas(raw_schemas: Dict[str, Mapping[str, Any]], raw_components: Map
  return context


- def extract_inline_array_items(schemas: Dict[str, IRSchema]) -> Dict[str, IRSchema]:
+ def extract_inline_array_items(schemas: dict[str, IRSchema]) -> dict[str, IRSchema]:
  """Extract inline array item schemas as unique named schemas and update references.

  Contracts:
@@ -132,7 +132,7 @@ def extract_inline_array_items(schemas: Dict[str, IRSchema]) -> Dict[str, IRSche
  return schemas


- def extract_inline_enums(schemas: Dict[str, IRSchema]) -> Dict[str, IRSchema]:
+ def extract_inline_enums(schemas: dict[str, IRSchema]) -> dict[str, IRSchema]:
  """Extract inline property enums as unique schemas and update property references.

  Also ensures top-level enum schemas are properly marked for generation.
pyopenapi_gen/core/pagination.py CHANGED
@@ -6,11 +6,11 @@ turning them into convenient async iterators that automatically handle
  fetching subsequent pages.
  """

- from typing import Any, AsyncIterator, Awaitable, Callable, Dict
+ from typing import Any, AsyncIterator, Awaitable, Callable


  def paginate_by_next(
- fetch_page: Callable[..., Awaitable[Dict[str, Any]]],
+ fetch_page: Callable[..., Awaitable[dict[str, Any]]],
  items_key: str = "items",
  next_key: str = "next",
  **params: Any,
@@ -52,7 +52,7 @@ def paginate_by_next(
  while True:
  result = await fetch_page(**params)
  # result is expected to be a dict
- # (assumed since fetch_page is typed to return Dict[str, Any])
+ # (assumed since fetch_page is typed to return dict[str, Any])
  items = result.get(items_key, [])
  for item in items:
  yield item
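`paginate_by_next` wraps a page-fetching coroutine into an async iterator. A hedged sketch against the signature above; the single-page fake below assumes iteration stops when a response carries no `next` value, which the helper's docstring implies but whose termination logic is not shown here:

```python
import asyncio
from typing import Any

from pyopenapi_gen.core.pagination import paginate_by_next


async def fetch_page(**params: Any) -> dict[str, Any]:
    # Fake endpoint: one page of results and no "next" cursor, so pagination should stop here.
    return {"items": ["a", "b", "c"]}


async def main() -> None:
    async for item in paginate_by_next(fetch_page, items_key="items", next_key="next"):
        print(item)


asyncio.run(main())
```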
pyopenapi_gen/core/parsing/common/ref_resolution/helpers/list_response.py CHANGED
@@ -3,7 +3,7 @@ Module for handling ListResponse fallback strategy.
  """

  import logging
- from typing import Any, Callable, Mapping, Optional
+ from typing import Any, Callable, Mapping

  from pyopenapi_gen.ir import IRSchema

@@ -17,8 +17,8 @@ def try_list_response_fallback(
  ref_value: str,
  context: ParsingContext,
  max_depth: int,
- parse_fn: Callable[[Optional[str], Optional[Mapping[str, Any]], ParsingContext, int], IRSchema],
- ) -> Optional[IRSchema]:
+ parse_fn: Callable[[str | None, Mapping[str, Any] | None, ParsingContext, int], IRSchema],
+ ) -> IRSchema | None:
  """
  Attempts to resolve a reference by treating it as a list of a base type.

pyopenapi_gen/core/parsing/common/ref_resolution/helpers/missing_ref.py CHANGED
@@ -3,7 +3,7 @@ Module for handling missing schema references.
  """

  import logging
- from typing import Any, Callable, Mapping, Optional
+ from typing import Any, Callable, Mapping

  from pyopenapi_gen.ir import IRSchema

@@ -19,7 +19,7 @@ def handle_missing_ref(
  ref_name: str,
  context: ParsingContext,
  max_depth: int,
- parse_fn: Callable[[Optional[str], Optional[Mapping[str, Any]], ParsingContext, int], IRSchema],
+ parse_fn: Callable[[str | None, Mapping[str, Any] | None, ParsingContext, int], IRSchema],
  ) -> IRSchema:
  """
  Handles a missing schema reference by attempting fallback strategies.
pyopenapi_gen/core/parsing/common/ref_resolution/helpers/new_schema.py CHANGED
@@ -3,7 +3,7 @@ Module for handling new schema references.
  """

  import logging
- from typing import Any, Callable, Dict, Mapping, Optional
+ from typing import Any, Callable, Mapping

  from pyopenapi_gen.ir import IRSchema

@@ -15,10 +15,10 @@ logger = logging.getLogger(__name__)

  def parse_new_schema(
  ref_name: str,
- node_data: Dict[str, Any],
+ node_data: dict[str, Any],
  context: ParsingContext,
  max_depth: int,
- parse_fn: Callable[[Optional[str], Optional[Mapping[str, Any]], ParsingContext, int], IRSchema],
+ parse_fn: Callable[[str | None, Mapping[str, Any] | None, ParsingContext, int], IRSchema],
  ) -> IRSchema:
  """
  Parses a new schema from raw data.
pyopenapi_gen/core/parsing/common/ref_resolution/helpers/stripped_suffix.py CHANGED
@@ -3,7 +3,7 @@ Module for handling stripped suffix fallback strategy.
  """

  import logging
- from typing import Any, Callable, Mapping, Optional
+ from typing import Any, Callable, Mapping

  from pyopenapi_gen.ir import IRSchema

@@ -17,8 +17,8 @@ def try_stripped_suffix_fallback(
  ref_value: str,
  context: ParsingContext,
  max_depth: int,
- parse_fn: Callable[[Optional[str], Optional[Mapping[str, Any]], ParsingContext, int], IRSchema],
- ) -> Optional[IRSchema]:
+ parse_fn: Callable[[str | None, Mapping[str, Any] | None, ParsingContext, int], IRSchema],
+ ) -> IRSchema | None:
  """
  Attempts to resolve a reference by stripping common suffixes.

pyopenapi_gen/core/parsing/common/ref_resolution/resolve_schema_ref.py CHANGED
@@ -3,7 +3,7 @@ Main module for schema reference resolution.
  """

  import logging
- from typing import Any, Callable, Mapping, Optional
+ from typing import Any, Callable, Mapping

  from pyopenapi_gen.ir import IRSchema

@@ -21,7 +21,7 @@ def resolve_schema_ref(
  ref_name: str,
  context: ParsingContext,
  max_depth: int,
- _parse_schema: Callable[[Optional[str], Optional[Mapping[str, Any]], ParsingContext, int], IRSchema],
+ _parse_schema: Callable[[str | None, Mapping[str, Any] | None, ParsingContext, int], IRSchema],
  ) -> IRSchema:
  """
  Resolves a schema reference in an OpenAPI specification.
pyopenapi_gen/core/parsing/common/type_parser.py CHANGED
@@ -7,7 +7,6 @@ from __future__ import annotations
  from typing import (
  Any,
  List,
- Optional,
  Tuple,
  )

@@ -15,8 +14,8 @@ from typing import (


  def extract_primary_type_and_nullability(
- type_node: Any, schema_name: Optional[str] = None
- ) -> Tuple[Optional[str], bool, List[str]]:
+ type_node: Any, schema_name: str | None = None
+ ) -> Tuple[str | None, bool, List[str]]:
  """Extract the primary type and nullability from a schema's 'type' field.

  Contracts:
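`extract_primary_type_and_nullability` now returns `Tuple[str | None, bool, List[str]]`. A hedged call sketch; the expected unpacking below is an assumption based on the signature and docstring, not on the function body:

```python
from pyopenapi_gen.core.parsing.common.type_parser import extract_primary_type_and_nullability

# For an OpenAPI 3.1-style type array such as ["string", "null"], the three return values
# are presumably the primary type, a nullability flag, and any collected warnings.
primary_type, is_nullable, warnings = extract_primary_type_and_nullability(["string", "null"], "Pet")
print(primary_type, is_nullable, warnings)
```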
pyopenapi_gen/core/parsing/context.py CHANGED
@@ -7,7 +7,7 @@ from __future__ import annotations
  import logging
  import os
  from dataclasses import dataclass, field
- from typing import TYPE_CHECKING, Any, Dict, List, Mapping, Optional, Set, Tuple
+ from typing import TYPE_CHECKING, Any, List, Mapping, Set, Tuple

  if TYPE_CHECKING:
  from pyopenapi_gen import IRSchema
@@ -21,12 +21,12 @@ logger = logging.getLogger(__name__)
  class ParsingContext:
  """Manages shared state and context during the schema parsing process."""

- raw_spec_schemas: Dict[str, Mapping[str, Any]] = field(default_factory=dict)
+ raw_spec_schemas: dict[str, Mapping[str, Any]] = field(default_factory=dict)
  raw_spec_components: Mapping[str, Any] = field(default_factory=dict)
- parsed_schemas: Dict[str, IRSchema] = field(default_factory=dict)
+ parsed_schemas: dict[str, IRSchema] = field(default_factory=dict)
  visited_refs: Set[str] = field(default_factory=set)
  global_schema_names: Set[str] = field(default_factory=set)
- package_root_name: Optional[str] = None
+ package_root_name: str | None = None
  # name_sanitizer: NameSanitizer = field(default_factory=NameSanitizer) # Decided to instantiate where needed for now
  collected_warnings: List[str] = field(default_factory=list) # For collecting warnings from helpers

@@ -51,7 +51,7 @@ class ParsingContext:
  max_depth=max_depth, # Share the same parsed_schemas dict
  )

- def unified_enter_schema(self, schema_name: Optional[str]) -> Any:
+ def unified_enter_schema(self, schema_name: str | None) -> Any:
  """Enter schema using unified cycle detection system."""
  from .unified_cycle_detection import unified_enter_schema

@@ -64,7 +64,7 @@ class ParsingContext:

  return result

- def unified_exit_schema(self, schema_name: Optional[str]) -> None:
+ def unified_exit_schema(self, schema_name: str | None) -> None:
  """Exit schema using unified cycle detection system."""
  from .unified_cycle_detection import unified_exit_schema

@@ -89,7 +89,7 @@ class ParsingContext:
  self.unified_cycle_context.depth_exceeded_schemas.clear()
  self.unified_cycle_context.cycle_detected = False

- def enter_schema(self, schema_name: Optional[str]) -> Tuple[bool, Optional[str]]:
+ def enter_schema(self, schema_name: str | None) -> Tuple[bool, str | None]:
  self.recursion_depth += 1

  if schema_name is None:
@@ -113,7 +113,7 @@ class ParsingContext:
  self.currently_parsing.append(schema_name)
  return False, None

- def exit_schema(self, schema_name: Optional[str]) -> None:
+ def exit_schema(self, schema_name: str | None) -> None:
  if self.recursion_depth == 0:
  self.logger.error("Cannot exit schema: recursion depth would go below zero.")
  return
@@ -149,7 +149,7 @@ class ParsingContext:
  """Helper to get a string representation of the current parsing path for logs."""
  return " -> ".join(self.currently_parsing)

- def get_parsed_schemas_for_emitter(self) -> Dict[str, IRSchema]:
+ def get_parsed_schemas_for_emitter(self) -> dict[str, IRSchema]:
  # ---- START RESTORE ----
  return {
  name: schema
@@ -173,7 +173,7 @@ class ParsingContext:
  raise TypeError("schema_name must be a string")
  return schema_name in self.parsed_schemas

- def get_parsed_schema(self, schema_name: str) -> Optional["IRSchema"]:
+ def get_parsed_schema(self, schema_name: str) -> "IRSchema" | None:
  """Get a parsed schema by its name.

  Contracts:
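`ParsingContext`'s enter/exit bookkeeping keeps its shape; only the annotations change. A hedged sketch of the legacy enter/exit pair visible above, assuming the remaining dataclass fields all carry defaults so a bare `ParsingContext()` is constructible:

```python
from pyopenapi_gen.core.parsing.context import ParsingContext

ctx = ParsingContext()                      # assumes every dataclass field has a default
entered_cycle, cycle_hint = ctx.enter_schema("Pet")
print(entered_cycle, cycle_hint)            # (False, None) on a first, non-cyclic entry, per the body above
ctx.exit_schema("Pet")
```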