pyopenapi_gen-2.7.2-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (137)
  1. pyopenapi_gen/__init__.py +224 -0
  2. pyopenapi_gen/__main__.py +6 -0
  3. pyopenapi_gen/cli.py +62 -0
  4. pyopenapi_gen/context/CLAUDE.md +284 -0
  5. pyopenapi_gen/context/file_manager.py +52 -0
  6. pyopenapi_gen/context/import_collector.py +382 -0
  7. pyopenapi_gen/context/render_context.py +726 -0
  8. pyopenapi_gen/core/CLAUDE.md +224 -0
  9. pyopenapi_gen/core/__init__.py +0 -0
  10. pyopenapi_gen/core/auth/base.py +22 -0
  11. pyopenapi_gen/core/auth/plugins.py +89 -0
  12. pyopenapi_gen/core/cattrs_converter.py +810 -0
  13. pyopenapi_gen/core/exceptions.py +20 -0
  14. pyopenapi_gen/core/http_status_codes.py +218 -0
  15. pyopenapi_gen/core/http_transport.py +222 -0
  16. pyopenapi_gen/core/loader/__init__.py +12 -0
  17. pyopenapi_gen/core/loader/loader.py +174 -0
  18. pyopenapi_gen/core/loader/operations/__init__.py +12 -0
  19. pyopenapi_gen/core/loader/operations/parser.py +161 -0
  20. pyopenapi_gen/core/loader/operations/post_processor.py +62 -0
  21. pyopenapi_gen/core/loader/operations/request_body.py +90 -0
  22. pyopenapi_gen/core/loader/parameters/__init__.py +10 -0
  23. pyopenapi_gen/core/loader/parameters/parser.py +186 -0
  24. pyopenapi_gen/core/loader/responses/__init__.py +10 -0
  25. pyopenapi_gen/core/loader/responses/parser.py +111 -0
  26. pyopenapi_gen/core/loader/schemas/__init__.py +11 -0
  27. pyopenapi_gen/core/loader/schemas/extractor.py +275 -0
  28. pyopenapi_gen/core/pagination.py +64 -0
  29. pyopenapi_gen/core/parsing/__init__.py +13 -0
  30. pyopenapi_gen/core/parsing/common/__init__.py +1 -0
  31. pyopenapi_gen/core/parsing/common/ref_resolution/__init__.py +9 -0
  32. pyopenapi_gen/core/parsing/common/ref_resolution/helpers/__init__.py +0 -0
  33. pyopenapi_gen/core/parsing/common/ref_resolution/helpers/cyclic_properties.py +66 -0
  34. pyopenapi_gen/core/parsing/common/ref_resolution/helpers/direct_cycle.py +33 -0
  35. pyopenapi_gen/core/parsing/common/ref_resolution/helpers/existing_schema.py +22 -0
  36. pyopenapi_gen/core/parsing/common/ref_resolution/helpers/list_response.py +54 -0
  37. pyopenapi_gen/core/parsing/common/ref_resolution/helpers/missing_ref.py +52 -0
  38. pyopenapi_gen/core/parsing/common/ref_resolution/helpers/new_schema.py +50 -0
  39. pyopenapi_gen/core/parsing/common/ref_resolution/helpers/stripped_suffix.py +51 -0
  40. pyopenapi_gen/core/parsing/common/ref_resolution/resolve_schema_ref.py +86 -0
  41. pyopenapi_gen/core/parsing/common/type_parser.py +73 -0
  42. pyopenapi_gen/core/parsing/context.py +187 -0
  43. pyopenapi_gen/core/parsing/cycle_helpers.py +126 -0
  44. pyopenapi_gen/core/parsing/keywords/__init__.py +1 -0
  45. pyopenapi_gen/core/parsing/keywords/all_of_parser.py +81 -0
  46. pyopenapi_gen/core/parsing/keywords/any_of_parser.py +84 -0
  47. pyopenapi_gen/core/parsing/keywords/array_items_parser.py +72 -0
  48. pyopenapi_gen/core/parsing/keywords/one_of_parser.py +77 -0
  49. pyopenapi_gen/core/parsing/keywords/properties_parser.py +98 -0
  50. pyopenapi_gen/core/parsing/schema_finalizer.py +169 -0
  51. pyopenapi_gen/core/parsing/schema_parser.py +804 -0
  52. pyopenapi_gen/core/parsing/transformers/__init__.py +0 -0
  53. pyopenapi_gen/core/parsing/transformers/inline_enum_extractor.py +285 -0
  54. pyopenapi_gen/core/parsing/transformers/inline_object_promoter.py +120 -0
  55. pyopenapi_gen/core/parsing/unified_cycle_detection.py +293 -0
  56. pyopenapi_gen/core/postprocess_manager.py +260 -0
  57. pyopenapi_gen/core/spec_fetcher.py +148 -0
  58. pyopenapi_gen/core/streaming_helpers.py +84 -0
  59. pyopenapi_gen/core/telemetry.py +69 -0
  60. pyopenapi_gen/core/utils.py +456 -0
  61. pyopenapi_gen/core/warning_collector.py +83 -0
  62. pyopenapi_gen/core/writers/code_writer.py +135 -0
  63. pyopenapi_gen/core/writers/documentation_writer.py +222 -0
  64. pyopenapi_gen/core/writers/line_writer.py +217 -0
  65. pyopenapi_gen/core/writers/python_construct_renderer.py +321 -0
  66. pyopenapi_gen/core_package_template/README.md +21 -0
  67. pyopenapi_gen/emit/models_emitter.py +143 -0
  68. pyopenapi_gen/emitters/CLAUDE.md +286 -0
  69. pyopenapi_gen/emitters/client_emitter.py +51 -0
  70. pyopenapi_gen/emitters/core_emitter.py +181 -0
  71. pyopenapi_gen/emitters/docs_emitter.py +44 -0
  72. pyopenapi_gen/emitters/endpoints_emitter.py +247 -0
  73. pyopenapi_gen/emitters/exceptions_emitter.py +187 -0
  74. pyopenapi_gen/emitters/mocks_emitter.py +185 -0
  75. pyopenapi_gen/emitters/models_emitter.py +426 -0
  76. pyopenapi_gen/generator/CLAUDE.md +352 -0
  77. pyopenapi_gen/generator/client_generator.py +567 -0
  78. pyopenapi_gen/generator/exceptions.py +7 -0
  79. pyopenapi_gen/helpers/CLAUDE.md +325 -0
  80. pyopenapi_gen/helpers/__init__.py +1 -0
  81. pyopenapi_gen/helpers/endpoint_utils.py +532 -0
  82. pyopenapi_gen/helpers/type_cleaner.py +334 -0
  83. pyopenapi_gen/helpers/type_helper.py +112 -0
  84. pyopenapi_gen/helpers/type_resolution/__init__.py +1 -0
  85. pyopenapi_gen/helpers/type_resolution/array_resolver.py +57 -0
  86. pyopenapi_gen/helpers/type_resolution/composition_resolver.py +79 -0
  87. pyopenapi_gen/helpers/type_resolution/finalizer.py +105 -0
  88. pyopenapi_gen/helpers/type_resolution/named_resolver.py +172 -0
  89. pyopenapi_gen/helpers/type_resolution/object_resolver.py +216 -0
  90. pyopenapi_gen/helpers/type_resolution/primitive_resolver.py +109 -0
  91. pyopenapi_gen/helpers/type_resolution/resolver.py +47 -0
  92. pyopenapi_gen/helpers/url_utils.py +14 -0
  93. pyopenapi_gen/http_types.py +20 -0
  94. pyopenapi_gen/ir.py +165 -0
  95. pyopenapi_gen/py.typed +1 -0
  96. pyopenapi_gen/types/CLAUDE.md +140 -0
  97. pyopenapi_gen/types/__init__.py +11 -0
  98. pyopenapi_gen/types/contracts/__init__.py +13 -0
  99. pyopenapi_gen/types/contracts/protocols.py +106 -0
  100. pyopenapi_gen/types/contracts/types.py +28 -0
  101. pyopenapi_gen/types/resolvers/__init__.py +7 -0
  102. pyopenapi_gen/types/resolvers/reference_resolver.py +71 -0
  103. pyopenapi_gen/types/resolvers/response_resolver.py +177 -0
  104. pyopenapi_gen/types/resolvers/schema_resolver.py +498 -0
  105. pyopenapi_gen/types/services/__init__.py +5 -0
  106. pyopenapi_gen/types/services/type_service.py +165 -0
  107. pyopenapi_gen/types/strategies/__init__.py +5 -0
  108. pyopenapi_gen/types/strategies/response_strategy.py +310 -0
  109. pyopenapi_gen/visit/CLAUDE.md +272 -0
  110. pyopenapi_gen/visit/client_visitor.py +477 -0
  111. pyopenapi_gen/visit/docs_visitor.py +38 -0
  112. pyopenapi_gen/visit/endpoint/__init__.py +1 -0
  113. pyopenapi_gen/visit/endpoint/endpoint_visitor.py +292 -0
  114. pyopenapi_gen/visit/endpoint/generators/__init__.py +1 -0
  115. pyopenapi_gen/visit/endpoint/generators/docstring_generator.py +123 -0
  116. pyopenapi_gen/visit/endpoint/generators/endpoint_method_generator.py +222 -0
  117. pyopenapi_gen/visit/endpoint/generators/mock_generator.py +140 -0
  118. pyopenapi_gen/visit/endpoint/generators/overload_generator.py +252 -0
  119. pyopenapi_gen/visit/endpoint/generators/request_generator.py +103 -0
  120. pyopenapi_gen/visit/endpoint/generators/response_handler_generator.py +705 -0
  121. pyopenapi_gen/visit/endpoint/generators/signature_generator.py +83 -0
  122. pyopenapi_gen/visit/endpoint/generators/url_args_generator.py +207 -0
  123. pyopenapi_gen/visit/endpoint/processors/__init__.py +1 -0
  124. pyopenapi_gen/visit/endpoint/processors/import_analyzer.py +78 -0
  125. pyopenapi_gen/visit/endpoint/processors/parameter_processor.py +171 -0
  126. pyopenapi_gen/visit/exception_visitor.py +90 -0
  127. pyopenapi_gen/visit/model/__init__.py +0 -0
  128. pyopenapi_gen/visit/model/alias_generator.py +93 -0
  129. pyopenapi_gen/visit/model/dataclass_generator.py +553 -0
  130. pyopenapi_gen/visit/model/enum_generator.py +212 -0
  131. pyopenapi_gen/visit/model/model_visitor.py +198 -0
  132. pyopenapi_gen/visit/visitor.py +97 -0
  133. pyopenapi_gen-2.7.2.dist-info/METADATA +1169 -0
  134. pyopenapi_gen-2.7.2.dist-info/RECORD +137 -0
  135. pyopenapi_gen-2.7.2.dist-info/WHEEL +4 -0
  136. pyopenapi_gen-2.7.2.dist-info/entry_points.txt +2 -0
  137. pyopenapi_gen-2.7.2.dist-info/licenses/LICENSE +21 -0
pyopenapi_gen/core/loader/schemas/extractor.py
@@ -0,0 +1,275 @@
+ """Schema extractors for OpenAPI IR transformation.
+
+ Provides functions to extract and transform schemas from raw OpenAPI specs.
+ """
+
+ from __future__ import annotations
+
+ import copy
+ import logging
+ from typing import Any, Mapping
+
+ from pyopenapi_gen import IRSchema
+ from pyopenapi_gen.core.parsing.context import ParsingContext
+ from pyopenapi_gen.core.parsing.schema_parser import _parse_schema
+ from pyopenapi_gen.core.utils import NameSanitizer
+
+ logger = logging.getLogger(__name__)
+
+
+ def build_schemas(raw_schemas: dict[str, Mapping[str, Any]], raw_components: Mapping[str, Any]) -> ParsingContext:
+     """Build all named schemas up front, populating a ParsingContext.
+
+     Contracts:
+         Preconditions:
+             - raw_schemas is a valid dict containing schema definitions
+             - raw_components is a valid mapping containing component definitions
+         Postconditions:
+             - A ParsingContext is returned with all schemas parsed
+             - All schemas in raw_schemas are populated in context.parsed_schemas
+     """
+     if not isinstance(raw_schemas, dict):
+         raise TypeError("raw_schemas must be a dict")
+     if not isinstance(raw_components, Mapping):
+         raise TypeError("raw_components must be a Mapping")
+
+     context = ParsingContext(raw_spec_schemas=raw_schemas, raw_spec_components=raw_components)
+
+     # Build initial IR for all schemas found in components
+     for n, nd in raw_schemas.items():
+         if n not in context.parsed_schemas:
+             _parse_schema(n, nd, context, allow_self_reference=True)
+
+     # Post-condition check
+     if not all(n in context.parsed_schemas for n in raw_schemas):
+         raise RuntimeError("Not all schemas were parsed")
+
+     return context
+
+
+ def extract_inline_array_items(schemas: dict[str, IRSchema]) -> dict[str, IRSchema]:
+     """Extract inline array item schemas as unique named schemas and update references.
+
+     Contracts:
+         Preconditions:
+             - schemas is a dict of IRSchema objects
+         Postconditions:
+             - Returns an updated schemas dict with extracted array item types
+             - All array item schemas have proper names
+             - No duplicate schema names are created
+     """
+     if not isinstance(schemas, dict):
+         raise TypeError("schemas must be a dict")
+     if not all(isinstance(s, IRSchema) for s in schemas.values()):
+         raise TypeError("all values must be IRSchema objects")
+
+     # Store original schema count for post-condition validation
+     original_schema_count = len(schemas)
+     original_schemas = set(schemas.keys())
+
+     new_item_schemas = {}
+     for schema_name, schema in list(schemas.items()):
+         # Check properties for array types
+         for prop_name, prop_schema in list(schema.properties.items()):
+             if prop_schema.type == "array" and prop_schema.items and not prop_schema.items.name:
+                 # Only extract complex item schemas (objects and arrays), not simple primitives or references
+                 items_schema = prop_schema.items
+                 # Check if items is a "null" type (malformed schema with no type) - these resolve to Any
+                 is_null_type_items = items_schema.type == "null"
+                 # Check if items is an empty object (no properties, no composition)
+                 is_empty_object = (
+                     items_schema.type == "object"
+                     and not items_schema.properties
+                     and not items_schema.any_of
+                     and not items_schema.one_of
+                     and not items_schema.all_of
+                 )
+                 is_complex_item = (
+                     not is_null_type_items
+                     and not is_empty_object
+                     and (
+                         items_schema.type == "object"
+                         or items_schema.type == "array"
+                         or items_schema.properties
+                         or items_schema.any_of
+                         or items_schema.one_of
+                         or items_schema.all_of
+                     )
+                 )
+
+                 if is_complex_item:
+                     # Generate a descriptive name for the item schema using content-aware naming
+                     # For arrays of complex objects, use the pattern: {Parent}{Property}Item
+                     # For arrays in response wrappers (like "data" fields), consider the content type
+                     if prop_name.lower() in ["data", "items", "results", "content"]:
+                         # For generic wrapper properties, try to derive name from the item type or parent
+                         if items_schema.type == "object" and schema_name.endswith("Response"):
+                             # Pattern: MessageBatchResponse.data -> MessageItem
+                             base_name = schema_name.replace("Response", "").replace("List", "")
+                             item_schema_name = f"{base_name}Item"
+                         else:
+                             # Fallback to standard pattern
+                             item_schema_name = (
+                                 f"{NameSanitizer.sanitize_class_name(schema_name)}"
+                                 f"{NameSanitizer.sanitize_class_name(prop_name)}Item"
+                             )
+                     else:
+                         # Standard pattern for named properties
+                         item_schema_name = (
+                             f"{NameSanitizer.sanitize_class_name(schema_name)}"
+                             f"{NameSanitizer.sanitize_class_name(prop_name)}Item"
+                         )
+
+                     base_item_name = item_schema_name
+                     i = 1
+                     while item_schema_name in schemas or item_schema_name in new_item_schemas:
+                         item_schema_name = f"{base_item_name}{i}"
+                         i += 1
+
+                     # Create a copy of the item schema with a name
+                     items_copy = copy.deepcopy(prop_schema.items)
+                     items_copy.name = item_schema_name
+                     new_item_schemas[item_schema_name] = items_copy
+
+                     # Update the original array schema to reference the named item schema
+                     prop_schema.items.name = item_schema_name
+
+     # Update the schemas dict with the new item schemas
+     schemas.update(new_item_schemas)
+
+     # Post-condition checks
+     if len(schemas) < original_schema_count:
+         raise RuntimeError("Schemas count should not decrease")
+     if not original_schemas.issubset(set(schemas.keys())):
+         raise RuntimeError("Original schemas should still be present")
+
+     return schemas
+
+
+ def extract_inline_enums(schemas: dict[str, IRSchema]) -> dict[str, IRSchema]:
+     """Extract inline property enums as unique schemas and update property references.
+
+     Also ensures top-level enum schemas are properly marked for generation.
+
+     Contracts:
+         Preconditions:
+             - schemas is a dict of IRSchema objects
+         Postconditions:
+             - Returns an updated schemas dict with extracted enum types and array item types
+             - All property schemas with enums have proper names
+             - All array item schemas have proper names
+             - No duplicate schema names are created
+             - Top-level enum schemas have generation_name set
+     """
+     if not isinstance(schemas, dict):
+         raise TypeError("schemas must be a dict")
+     if not all(isinstance(s, IRSchema) for s in schemas.values()):
+         raise TypeError("all values must be IRSchema objects")
+
+     # Store original schema count for post-condition validation
+     original_schema_count = len(schemas)
+     original_schemas = set(schemas.keys())
+
+     # First extract array item schemas so they can have enums extracted in the next step
+     schemas = extract_inline_array_items(schemas)
+
+     new_enums = {}
+     for schema_name, schema in list(schemas.items()):
+         # Handle top-level enum schemas (those defined directly in components/schemas)
+         # These are already enums but need generation_name set
+         if schema.enum and schema.type in ["string", "integer", "number"]:
+             # This is a top-level enum schema
+             # Ensure it has generation_name set (will be properly set by emitter later,
+             # but we can set it here to avoid the warning)
+             if not hasattr(schema, "generation_name") or not schema.generation_name:
+                 schema.generation_name = schema.name
+                 logger.info(
+                     f"Set generation_name for top-level enum schema: {schema_name} with values {schema.enum[:3]}..."
+                 )
+             # Mark this as a properly processed enum by ensuring generation_name is set
+             # This serves as the marker that this enum was properly processed
+             logger.debug(f"Marked top-level enum schema: {schema_name}")
+
+         # Extract inline enums from properties
+         for prop_name, prop_schema in list(schema.properties.items()):
+             # Check if this property has an inline enum that needs extraction
+             # An inline enum needs extraction if:
+             # 1. It has enum values defined
+             # 2. The enum doesn't already exist as a separate schema in the schemas dict
+             # Note: After schema parsing, property schemas have 'name' set to the property key
+             # and 'generation_name' set to a sanitised class name, but the enum itself
+             # isn't registered as a separate schema yet.
+             has_inline_enum = prop_schema.enum and prop_schema.type in ["string", "integer", "number"]
+
+             # Check if the enum was already extracted or is a named reference
+             # Case 1: generation_name exists in schemas dict (already extracted)
+             # Case 2: property name itself is a schema reference (e.g., ExistingStatusEnum)
+             enum_already_extracted = (
+                 (
+                     prop_schema.generation_name
+                     and prop_schema.generation_name in schemas
+                     and schemas[prop_schema.generation_name].enum
+                 )
+                 or (
+                     # Property name is an explicit enum reference (class-like name, not property key)
+                     prop_schema.name
+                     and prop_schema.name in schemas
+                     and schemas[prop_schema.name].enum
+                 )
+                 or (
+                     # Property name looks like an enum class name (not a property key)
+                     # Property keys are typically snake_case, class names are PascalCase
+                     prop_schema.name
+                     and prop_schema.name[0].isupper()
+                     and "_" not in prop_schema.name
+                     and prop_schema.name != prop_name  # Name differs from property key
+                 )
+             )
+
+             if has_inline_enum and not enum_already_extracted:
+                 # Use property's existing generation_name if set, otherwise create a new name
+                 # This keeps naming consistent with what the type resolver already assigned
+                 if prop_schema.generation_name:
+                     enum_name = prop_schema.generation_name
+                 else:
+                     enum_name = (
+                         f"{NameSanitizer.sanitize_class_name(schema_name)}"
+                         f"{NameSanitizer.sanitize_class_name(prop_name)}Enum"
+                     )
+                 base_enum_name = enum_name
+                 i = 1
+                 while enum_name in schemas or enum_name in new_enums:
+                     enum_name = f"{base_enum_name}{i}"
+                     i += 1
+
+                 # Derive module stem from final enum name
+                 module_stem = NameSanitizer.sanitize_module_name(enum_name)
+
+                 enum_schema = IRSchema(
+                     name=enum_name,
+                     type=prop_schema.type,
+                     enum=copy.deepcopy(prop_schema.enum),
+                     description=prop_schema.description or f"Enum for {schema_name}.{prop_name}",
+                 )
+                 enum_schema.generation_name = enum_name
+                 enum_schema.final_module_stem = module_stem
+                 new_enums[enum_name] = enum_schema
+                 logger.debug(f"Extracted inline enum from {schema_name}.{prop_name}: {enum_name}")
+
+                 # Update the original property to reference the extracted enum
+                 prop_schema.name = enum_name
+                 prop_schema.type = enum_name  # Make the property reference the enum by name
+                 prop_schema.generation_name = enum_name  # Ensure property also has correct generation_name
+                 prop_schema.final_module_stem = module_stem  # And module stem
+                 prop_schema.enum = None  # Clear the inline enum since it's now extracted
+
+     # Update the schemas dict with the new enums
+     schemas.update(new_enums)
+
+     # Post-condition checks
+     if len(schemas) < original_schema_count:
+         raise RuntimeError("Schemas count should not decrease")
+     if not original_schemas.issubset(set(schemas.keys())):
+         raise RuntimeError("Original schemas should still be present")
+
+     return schemas
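Taken together, these extractors form a small pipeline: `build_schemas` parses every named schema in `components/schemas` into IR, and `extract_inline_enums` (which first calls `extract_inline_array_items`) then promotes anonymous enums and array-item objects to named schemas. A minimal sketch of that flow, assuming the module path labelled above and an illustrative `Pet` schema that is not taken from the package:

```python
from pyopenapi_gen.core.loader.schemas.extractor import build_schemas, extract_inline_enums

# Illustrative components section; not taken from the package's own specs.
components = {
    "schemas": {
        "Pet": {
            "type": "object",
            "properties": {
                "status": {"type": "string", "enum": ["available", "sold"]},
                "tags": {
                    "type": "array",
                    "items": {"type": "object", "properties": {"label": {"type": "string"}}},
                },
            },
        }
    }
}

# Parse all named schemas into IR, then promote inline enums and array items to named schemas.
context = build_schemas(components["schemas"], components)
schemas = extract_inline_enums(context.parsed_schemas)
# Exact generated names depend on what the parser has already assigned, but per the naming
# rules above the inline enum and array-item object would surface roughly as
# "PetStatusEnum" and "PetTagsItem".
```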
pyopenapi_gen/core/pagination.py
@@ -0,0 +1,64 @@
+ """
+ Pagination utilities for handling paginated API endpoints.
+
+ This module provides functions for working with paginated API responses,
+ turning them into convenient async iterators that automatically handle
+ fetching subsequent pages.
+ """
+
+ from typing import Any, AsyncIterator, Awaitable, Callable
+
+
+ def paginate_by_next(
+     fetch_page: Callable[..., Awaitable[dict[str, Any]]],
+     items_key: str = "items",
+     next_key: str = "next",
+     **params: Any,
+ ) -> AsyncIterator[Any]:
+     """
+     Create an async iterator that yields items from paginated API responses.
+
+     This function creates a paginator that automatically handles fetching
+     subsequent pages of results by using a "next page token" pattern. It calls
+     the provided `fetch_page` function repeatedly with the given parameters,
+     updating the next token parameter between calls.
+
+     Args:
+         fetch_page: Async function to fetch a page of results
+         items_key: The key in the response dict where items are located (default: "items")
+         next_key: The key in the response dict for the next page token (default: "next")
+         **params: Initial parameters to pass to fetch_page
+
+     Returns:
+         An AsyncIterator that yields individual items from all pages
+
+     Example:
+         ```python
+         async def fetch_users_page(page_token=None, limit=100):
+             url = f"/users?limit={limit}"
+             if page_token:
+                 url += f"&page_token={page_token}"
+             return await http_client.get(url)
+
+         async for user in paginate_by_next(fetch_users_page,
+                                            items_key="users",
+                                            next_key="page_token",
+                                            limit=50):
+             print(user["name"])
+         ```
+     """
+
+     async def _paginate() -> AsyncIterator[Any]:
+         while True:
+             result = await fetch_page(**params)
+             # result is expected to be a dict
+             # (assumed since fetch_page is typed to return dict[str, Any])
+             items = result.get(items_key, [])
+             for item in items:
+                 yield item
+             token = result.get(next_key)
+             if not token:
+                 break
+             params[next_key] = token
+
+     return _paginate()
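One detail worth calling out: `paginate_by_next` is a plain (non-async) function that builds and returns the async generator, so the call itself is not awaited; only the iteration is. A minimal sketch, assuming the module path labelled above maps to `pyopenapi_gen.core.pagination` and using a hypothetical in-memory page fetcher:

```python
import asyncio
from typing import Any

from pyopenapi_gen.core.pagination import paginate_by_next

# Hypothetical two-page dataset keyed by the "next" token.
_PAGES: dict[str | None, dict[str, Any]] = {
    None: {"items": [1, 2], "next": "t1"},
    "t1": {"items": [3], "next": None},
}


async def get_page(next: str | None = None, limit: int = 25) -> dict[str, Any]:
    # The parameter is named "next" only because next_key="next" below.
    return _PAGES[next]


async def main() -> None:
    # The paginate_by_next() call is not awaited; it returns the async iterator directly.
    async for item in paginate_by_next(get_page, items_key="items", next_key="next", limit=25):
        print(item)  # 1, 2, 3


asyncio.run(main())
```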
pyopenapi_gen/core/parsing/__init__.py
@@ -0,0 +1,13 @@
+ # Initialize the parsing module
+
+ # Expose the main schema parsing entry point if desired,
+ # otherwise, it remains internal (_parse_schema).
+ # from .schema_parser import _parse_schema as parse_openapi_schema_node
+
+ # Other parsers can be imported here if they need to be part of the public API
+ # of this sub-package, though most are internal helpers for _parse_schema.
+ from typing import List
+
+ __all__: List[str] = [
+     # "parse_openapi_schema_node", # Example if we were to expose it
+ ]
pyopenapi_gen/core/parsing/common/__init__.py
@@ -0,0 +1 @@
+ # common parsing utilities
pyopenapi_gen/core/parsing/common/ref_resolution/__init__.py
@@ -0,0 +1,9 @@
+ """
+ Module for handling schema reference resolution.
+ """
+
+ from .resolve_schema_ref import resolve_schema_ref
+
+ __all__ = [
+     "resolve_schema_ref",
+ ]
pyopenapi_gen/core/parsing/common/ref_resolution/helpers/cyclic_properties.py
@@ -0,0 +1,66 @@
+ """
+ Helper module for handling cyclic property references in schemas.
+ """
+
+ import logging
+ from typing import Set
+
+ from pyopenapi_gen.ir import IRSchema
+
+ from ....context import ParsingContext
+
+ logger = logging.getLogger(__name__)
+
+
+ def mark_cyclic_property_references(schema_obj: IRSchema, ref_name: str, context: ParsingContext) -> None:
+     """
+     Marks properties in a schema that form cycles as unresolved.
+
+     Args:
+         schema_obj: The schema object to check for cycles
+         ref_name: The name of the schema being referenced
+         context: The parsing context
+
+     Pre-conditions:
+         - schema_obj is a valid IRSchema instance
+         - ref_name is a non-empty string
+         - context is a valid ParsingContext instance
+
+     Post-conditions:
+         - Properties that form cycles are marked as unresolved
+         - Non-cyclic properties remain unchanged
+     """
+     if not schema_obj.properties:
+         return
+
+     visited: Set[str] = set()
+
+     def _check_cycle(prop_name: str) -> bool:
+         if prop_name in visited:
+             return True
+         visited.add(prop_name)
+
+         prop_schema = schema_obj.properties.get(prop_name)
+         if not prop_schema or not prop_schema._refers_to_schema:
+             return False
+
+         if prop_schema._refers_to_schema.name == ref_name:
+             return True
+
+         if prop_schema._refers_to_schema.properties:
+             for nested_prop_name in prop_schema._refers_to_schema.properties:
+                 nested_prop = prop_schema._refers_to_schema.properties[nested_prop_name]
+                 if nested_prop._refers_to_schema and nested_prop._refers_to_schema.name == ref_name:
+                     return True
+                 if _check_cycle(nested_prop_name):
+                     return True
+
+         return False
+
+     # Check each property for cycles
+     for prop_name, prop_schema in schema_obj.properties.items():
+         if _check_cycle(prop_name):
+             prop_schema._from_unresolved_ref = True
+             prop_schema._is_circular_ref = True
+             context.cycle_detected = True
+             logger.debug(f"Cyclic property reference detected: {ref_name}.{prop_name}")
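For example (illustrative names), if an `Employee` schema's `manager` property refers back to `Employee`, directly or through a nested property, that property ends up flagged with `_from_unresolved_ref` and `_is_circular_ref` and `context.cycle_detected` is set, while properties outside the cycle are left untouched.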
pyopenapi_gen/core/parsing/common/ref_resolution/helpers/direct_cycle.py
@@ -0,0 +1,33 @@
+ """
+ Module for handling direct cycle detection.
+ """
+
+ import logging
+
+ from pyopenapi_gen.ir import IRSchema
+
+ from ....context import ParsingContext
+
+ logger = logging.getLogger(__name__)
+
+
+ def handle_direct_cycle(ref_name: str, context: ParsingContext) -> IRSchema:
+     """
+     Handles a direct cycle in schema references.
+
+     Contracts:
+         Pre-conditions:
+             - ref_name must be a valid schema name
+             - context must be a valid ParsingContext instance
+             - ref_name must exist in context.parsed_schemas
+         Post-conditions:
+             - Returns the existing schema from context.parsed_schemas
+             - The schema's _from_unresolved_ref flag is set to True
+             - The schema's _is_circular_ref flag is set to True (for harmonized cycle detection)
+     """
+     existing_schema = context.parsed_schemas[ref_name]
+     existing_schema._from_unresolved_ref = True
+     existing_schema._is_circular_ref = True  # Harmonize with cycle detection contract
+     context.cycle_detected = True  # Mark cycle in context
+     logger.debug(f"Direct cycle detected for schema '{ref_name}'")
+     return existing_schema
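For orientation, the case this helper handles is a schema whose parsing re-encounters a `$ref` to itself while its placeholder is still the entry in `context.parsed_schemas`. An illustrative, hypothetical spec fragment:

```python
# Hypothetical self-referential schema: while "Category" is being parsed, resolving the
# "parent" $ref finds the in-progress "Category" entry, so handle_direct_cycle returns that
# same object with _from_unresolved_ref and _is_circular_ref set and marks the context.
category_schema = {
    "type": "object",
    "properties": {
        "name": {"type": "string"},
        "parent": {"$ref": "#/components/schemas/Category"},
    },
}
```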
pyopenapi_gen/core/parsing/common/ref_resolution/helpers/existing_schema.py
@@ -0,0 +1,22 @@
+ """
+ Module for handling existing schema references.
+ """
+
+ from pyopenapi_gen.ir import IRSchema
+
+ from ....context import ParsingContext
+
+
+ def handle_existing_schema(ref_name: str, context: ParsingContext) -> IRSchema:
+     """
+     Handles an existing schema reference.
+
+     Contracts:
+         Pre-conditions:
+             - ref_name must be a valid schema name
+             - context must be a valid ParsingContext instance
+             - ref_name must exist in context.parsed_schemas
+         Post-conditions:
+             - Returns the existing schema from context.parsed_schemas
+     """
+     return context.parsed_schemas[ref_name]
pyopenapi_gen/core/parsing/common/ref_resolution/helpers/list_response.py
@@ -0,0 +1,54 @@
+ """
+ Module for handling ListResponse fallback strategy.
+ """
+
+ import logging
+ from typing import Any, Callable, Mapping
+
+ from pyopenapi_gen.ir import IRSchema
+
+ from ....context import ParsingContext
+
+ logger = logging.getLogger(__name__)
+
+
+ def try_list_response_fallback(
+     ref_name: str,
+     ref_value: str,
+     context: ParsingContext,
+     max_depth: int,
+     parse_fn: Callable[[str | None, Mapping[str, Any] | None, ParsingContext, int], IRSchema],
+ ) -> IRSchema | None:
+     """
+     Attempts to resolve a reference by treating it as a list of a base type.
+
+     Contracts:
+         Pre-conditions:
+             - ref_name must end with "ListResponse"
+             - parse_fn must be a callable that parses schemas
+             - context must be a valid ParsingContext instance
+         Post-conditions:
+             - If successful, returns an array IRSchema with items of the base type
+             - If unsuccessful, returns None
+             - Successful resolutions are added to context.parsed_schemas
+     """
+     list_response_suffix = "ListResponse"
+     if not ref_name.endswith(list_response_suffix):
+         return None
+
+     base_name = ref_name[: -len(list_response_suffix)]
+     referenced_node_data_fallback = context.raw_spec_schemas.get(base_name)
+
+     if not referenced_node_data_fallback:
+         return None
+
+     item_schema = parse_fn(base_name, referenced_node_data_fallback, context, max_depth)
+     if item_schema._from_unresolved_ref:
+         return None
+
+     warning_msg = f"Resolved $ref: {ref_value} by falling back to LIST of base name '{base_name}'."
+     context.collected_warnings.append(warning_msg)
+
+     resolved_schema = IRSchema(name=ref_name, type="array", items=item_schema)
+     context.parsed_schemas[ref_name] = resolved_schema
+     return resolved_schema
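In practice this covers specs that reference a `<Base>ListResponse` schema that is never defined while `<Base>` is; the missing name is synthesised as an array of the base schema. A hypothetical fragment to illustrate:

```python
# Hypothetical spec: "UserListResponse" is referenced by an operation but never defined.
raw_schemas = {
    "User": {"type": "object", "properties": {"id": {"type": "string"}}},
}
# try_list_response_fallback("UserListResponse", "#/components/schemas/UserListResponse", ...)
# strips the suffix, parses "User", registers an array-of-User IRSchema under the name
# "UserListResponse", and appends a warning to context.collected_warnings.
```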
pyopenapi_gen/core/parsing/common/ref_resolution/helpers/missing_ref.py
@@ -0,0 +1,52 @@
+ """
+ Module for handling missing schema references.
+ """
+
+ import logging
+ from typing import Any, Callable, Mapping
+
+ from pyopenapi_gen.ir import IRSchema
+
+ from ....context import ParsingContext
+ from .list_response import try_list_response_fallback
+ from .stripped_suffix import try_stripped_suffix_fallback
+
+ logger = logging.getLogger(__name__)
+
+
+ def handle_missing_ref(
+     ref_value: str,
+     ref_name: str,
+     context: ParsingContext,
+     max_depth: int,
+     parse_fn: Callable[[str | None, Mapping[str, Any] | None, ParsingContext, int], IRSchema],
+ ) -> IRSchema:
+     """
+     Handles a missing schema reference by attempting fallback strategies.
+
+     Contracts:
+         Pre-conditions:
+             - ref_value must be a valid reference string
+             - ref_name must be a valid schema name
+             - context must be a valid ParsingContext instance
+             - max_depth must be a non-negative integer
+             - parse_fn must be a callable that parses schemas
+         Post-conditions:
+             - Returns a valid IRSchema instance
+             - The schema is registered in context.parsed_schemas
+             - If no fallback succeeds, returns an unresolved schema
+     """
+     # Try ListResponse fallback
+     list_response_schema = try_list_response_fallback(ref_name, ref_value, context, max_depth, parse_fn)
+     if list_response_schema is not None:
+         return list_response_schema
+
+     # Try stripped suffix fallback
+     stripped_schema = try_stripped_suffix_fallback(ref_name, ref_value, context, max_depth, parse_fn)
+     if stripped_schema is not None:
+         return stripped_schema
+
+     # If all fallbacks fail, create an unresolved schema
+     unresolved_schema = IRSchema(name=ref_name, _from_unresolved_ref=True)
+     context.parsed_schemas[ref_name] = unresolved_schema
+     return unresolved_schema
pyopenapi_gen/core/parsing/common/ref_resolution/helpers/new_schema.py
@@ -0,0 +1,50 @@
+ """
+ Module for handling new schema references.
+ """
+
+ import logging
+ from typing import Any, Callable, Mapping
+
+ from pyopenapi_gen.ir import IRSchema
+
+ from ....context import ParsingContext
+ from .cyclic_properties import mark_cyclic_property_references
+
+ logger = logging.getLogger(__name__)
+
+
+ def parse_new_schema(
+     ref_name: str,
+     node_data: dict[str, Any],
+     context: ParsingContext,
+     max_depth: int,
+     parse_fn: Callable[[str | None, Mapping[str, Any] | None, ParsingContext, int], IRSchema],
+ ) -> IRSchema:
+     """
+     Parses a new schema from raw data.
+
+     Contracts:
+         Pre-conditions:
+             - ref_name must be a valid schema name not already fully parsed
+             - node_data must contain raw schema definition
+             - parse_fn must be a callable that parses schemas
+             - context must be a valid ParsingContext instance
+         Post-conditions:
+             - Returns a valid parsed IRSchema instance
+             - The schema is registered in context.parsed_schemas
+             - Cyclic property references are marked correctly
+     """
+     # Create stub to prevent infinite recursion during parsing
+     stub_schema = IRSchema(name=ref_name)
+     context.parsed_schemas[ref_name] = stub_schema
+
+     # Parse the actual schema
+     schema_obj = parse_fn(ref_name, node_data, context, max_depth)
+
+     # Update the entry in parsed_schemas with the fully parsed schema
+     context.parsed_schemas[ref_name] = schema_obj
+
+     # Mark any property references involved in cycles
+     mark_cyclic_property_references(schema_obj, ref_name, context)
+
+     return schema_obj
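The stub registered before `parse_fn` runs is what lets self-referential schemas terminate: when parsing the body re-encounters the same name, the resolver finds the placeholder instead of recursing forever. The same register-then-fill pattern in isolation (a generic sketch, not code from the package):

```python
from typing import Any

_parsed: dict[str, dict[str, Any]] = {}


def parse(name: str, raw: dict[str, Any]) -> dict[str, Any]:
    if name in _parsed:  # a stub (or finished result) already exists: stop recursing
        return _parsed[name]
    _parsed[name] = {"name": name}  # register the stub before descending
    result = {"name": name, "children": [parse(n, r) for n, r in raw.get("refs", {}).items()]}
    _parsed[name] = result  # replace the stub with the fully parsed result
    return result
```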