pyopenapi-gen 0.8.6__py3-none-any.whl → 0.9.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pyopenapi_gen/__init__.py +2 -2
- pyopenapi_gen/context/CLAUDE.md +284 -0
- pyopenapi_gen/context/import_collector.py +8 -8
- pyopenapi_gen/core/CLAUDE.md +224 -0
- pyopenapi_gen/core/loader/operations/parser.py +1 -1
- pyopenapi_gen/core/parsing/cycle_helpers.py +1 -1
- pyopenapi_gen/core/parsing/keywords/properties_parser.py +4 -4
- pyopenapi_gen/core/parsing/schema_parser.py +4 -4
- pyopenapi_gen/core/parsing/transformers/inline_enum_extractor.py +1 -1
- pyopenapi_gen/core/writers/python_construct_renderer.py +2 -2
- pyopenapi_gen/emitters/CLAUDE.md +286 -0
- pyopenapi_gen/emitters/endpoints_emitter.py +1 -1
- pyopenapi_gen/generator/CLAUDE.md +352 -0
- pyopenapi_gen/helpers/CLAUDE.md +325 -0
- pyopenapi_gen/helpers/endpoint_utils.py +2 -2
- pyopenapi_gen/helpers/type_cleaner.py +1 -1
- pyopenapi_gen/helpers/type_resolution/composition_resolver.py +1 -1
- pyopenapi_gen/helpers/type_resolution/finalizer.py +1 -1
- pyopenapi_gen/types/CLAUDE.md +140 -0
- pyopenapi_gen/types/resolvers/schema_resolver.py +2 -2
- pyopenapi_gen/visit/CLAUDE.md +272 -0
- pyopenapi_gen/visit/endpoint/generators/docstring_generator.py +1 -1
- pyopenapi_gen/visit/endpoint/generators/signature_generator.py +1 -1
- pyopenapi_gen/visit/endpoint/processors/parameter_processor.py +1 -1
- {pyopenapi_gen-0.8.6.dist-info → pyopenapi_gen-0.9.0.dist-info}/METADATA +56 -40
- {pyopenapi_gen-0.8.6.dist-info → pyopenapi_gen-0.9.0.dist-info}/RECORD +49 -42
- {pyopenapi_gen-0.8.6.dist-info → pyopenapi_gen-0.9.0.dist-info}/WHEEL +1 -1
- pyopenapi_gen-0.9.0.dist-info/entry_points.txt +3 -0
- pyopenapi_gen-0.8.6.dist-info/entry_points.txt +0 -2
- {pyopenapi_gen-0.8.6.dist-info/licenses → pyopenapi_gen-0.9.0.dist-info}/LICENSE +0 -0
pyopenapi_gen/__init__.py
CHANGED
@@ -42,8 +42,8 @@ __all__ = [
     "WarningCollector",
 ]
 
-# Semantic version of the generator core –
-__version__: str = "0.
+# Semantic version of the generator core – automatically managed by semantic-release.
+__version__: str = "0.9.0"
 
 
 # ---------------------------------------------------------------------------

pyopenapi_gen/context/CLAUDE.md
ADDED
@@ -0,0 +1,284 @@
# context/ - Rendering Context Management

## Why This Folder?
Manage stateful information during code generation: imports, templates, file paths, and rendering state. Provides clean interface between visitors and emitters.

## Key Dependencies
- **Input**: Path configuration, package names, template data
- **Output**: Import statements, resolved file paths, template rendering
- **Used by**: All visitors and emitters for consistent code generation

## Essential Architecture

### 1. Context Lifecycle
```python
# 1. Create context for generation session
context = RenderContext(project_root="/path/to/project", output_package="my_client")

# 2. Visitors use context for type resolution and imports
visitor.visit_schema(schema, context)  # Registers imports

# 3. Emitters use context for file organization
emitter.emit_models(schemas, context)  # Consumes imports
```

### 2. State Management
```python
# render_context.py
class RenderContext:
    def __init__(self, project_root: Path, output_package: str):
        self.import_collector = ImportCollector()
        self.file_manager = FileManager(project_root)
        self.template_vars = {}
        self.output_package = output_package
        self.forward_refs = set()
```

## Critical Components

### render_context.py
**Purpose**: Main context object passed through generation pipeline
```python
class RenderContext:
    def add_import(self, import_statement: str) -> None:
        """Register import for current file being generated"""
        self.import_collector.add_import(import_statement)

    def get_imports(self) -> List[str]:
        """Get sorted, deduplicated imports for current file"""
        return self.import_collector.get_sorted_imports()

    def clear_imports(self) -> None:
        """Clear imports for next file generation"""
        self.import_collector.clear()

    def resolve_relative_import(self, from_package: str, to_package: str) -> str:
        """Convert absolute import to relative import"""
        return self.import_collector.make_relative_import(from_package, to_package)
```

### import_collector.py
**Purpose**: Collect and manage import statements during code generation
```python
class ImportCollector:
    def __init__(self):
        self.imports: Set[str] = set()
        self.from_imports: Dict[str, Set[str]] = defaultdict(set)

    def add_import(self, import_statement: str) -> None:
        """Add import statement, handling both 'import' and 'from' forms"""
        if import_statement.startswith("from "):
            self.parse_from_import(import_statement)
        else:
            self.imports.add(import_statement)

    def get_sorted_imports(self) -> List[str]:
        """Return sorted imports: stdlib, third-party, local"""
        return self.sort_imports_by_category()
```

### file_manager.py
**Purpose**: Handle file operations and path resolution
```python
class FileManager:
    def __init__(self, project_root: Path):
        self.project_root = project_root

    def write_file(self, file_path: Path, content: str) -> None:
        """Write file with proper directory creation"""
        file_path.parent.mkdir(parents=True, exist_ok=True)
        file_path.write_text(content)

    def resolve_package_path(self, package_name: str) -> Path:
        """Convert package.name to file system path"""
        parts = package_name.split(".")
        return self.project_root / Path(*parts)
```

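A minimal end-to-end sketch of how these pieces combine when emitting one module; the class names follow the snippets above and the paths and values are illustrative:

```python
from pathlib import Path

# Illustrative wiring of the sketches above (RenderContext, FileManager).
context = RenderContext(project_root=Path("/project"), output_package="my_client")
context.add_import("from typing import List")

models_pkg = context.file_manager.resolve_package_path("my_client.models")
header = "\n".join(context.get_imports())
context.file_manager.write_file(models_pkg / "user.py", header + "\n\nclass User:\n    ...\n")
```
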
## Import Management Patterns

### 1. Import Categories
```python
# import_collector.py
def categorize_import(self, import_statement: str) -> ImportCategory:
    """Categorize imports for proper sorting"""
    if self.is_stdlib_import(import_statement):
        return ImportCategory.STDLIB
    elif self.is_third_party_import(import_statement):
        return ImportCategory.THIRD_PARTY
    else:
        return ImportCategory.LOCAL
```

### 2. From Import Consolidation
```python
# Convert multiple from imports to single statement
# "from typing import List"
# "from typing import Dict"
# →
# "from typing import Dict, List"

def consolidate_from_imports(self) -> List[str]:
    consolidated = []
    for module, imports in self.from_imports.items():
        sorted_imports = sorted(imports)
        consolidated.append(f"from {module} import {', '.join(sorted_imports)}")
    return consolidated
```

### 3. Relative Import Conversion
```python
def make_relative_import(self, from_package: str, to_package: str) -> str:
    """Convert absolute import to relative import"""
    # from my_client.models.user import User
    # →
    # from ..models.user import User (when called from my_client.endpoints.users)

    from_parts = from_package.split(".")
    to_parts = to_package.split(".")

    # Find common prefix
    common_len = self.find_common_prefix_length(from_parts, to_parts)

    # Calculate relative depth
    relative_depth = len(from_parts) - common_len
    prefix = "." * relative_depth

    # Build relative module path; the caller appends the imported names
    remaining_path = ".".join(to_parts[common_len:])
    return f"{prefix}{remaining_path}"
```

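A standalone walk-through of the same arithmetic with concrete, made-up module paths:

```python
# Importing module: my_client.endpoints.users  →  target: my_client.models.user
from_parts = "my_client.endpoints.users".split(".")
to_parts = "my_client.models.user".split(".")

common_len = 0
while (common_len < min(len(from_parts), len(to_parts))
       and from_parts[common_len] == to_parts[common_len]):
    common_len += 1                                   # shared prefix: ["my_client"]

prefix = "." * (len(from_parts) - common_len)         # 3 - 1 = 2 dots → ".."
remaining_path = ".".join(to_parts[common_len:])      # "models.user"
print(f"from {prefix}{remaining_path} import User")   # from ..models.user import User
```
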
## Template Management

### 1. Template Variables
```python
# Store template variables for consistent rendering
context.template_vars.update({
    "client_name": "MyAPIClient",
    "base_url": "https://api.example.com",
    "version": "1.0.0",
    "auth_type": "bearer"
})
```

### 2. Template Rendering
```python
def render_template(self, template: str, **kwargs) -> str:
    """Render template with context variables"""
    all_vars = {**self.template_vars, **kwargs}
    return template.format(**all_vars)
```

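The same `str.format` mechanics shown standalone, with a per-call value overriding a stored variable (all values made up):

```python
template_vars = {"client_name": "MyAPIClient", "base_url": "https://api.example.com"}
template = 'class {client_name}:\n    BASE_URL = "{base_url}"'

all_vars = {**template_vars, "base_url": "https://staging.example.com"}  # kwarg override wins
print(template.format(**all_vars))
# class MyAPIClient:
#     BASE_URL = "https://staging.example.com"
```
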
## Dependencies on Other Systems

### From types/
- Implements `TypeContext` protocol for type resolution
- Provides import registration for complex types

### From visit/
- Receives import registration during code generation
- Provides path resolution for relative imports

### From emitters/
- Provides file writing capabilities
- Supplies consolidated imports for file headers

## Testing Requirements

### Import Management Tests
```python
def test_import_collector__multiple_from_imports__consolidates_correctly():
    # Arrange
    collector = ImportCollector()
    collector.add_import("from typing import List")
    collector.add_import("from typing import Dict")

    # Act
    imports = collector.get_sorted_imports()

    # Assert
    assert "from typing import Dict, List" in imports
```

### Path Resolution Tests
```python
def test_file_manager__package_path__resolves_correctly():
    # Test package name to file path conversion
    manager = FileManager(Path("/project"))
    path = manager.resolve_package_path("my_client.models")

    assert path == Path("/project/my_client/models")
```

## Extension Points

### Custom Import Sorting
```python
class CustomImportCollector(ImportCollector):
    def sort_imports_by_category(self) -> List[str]:
        # Custom import sorting logic
        # Example: Group all async imports together
        pass
```

### Template System Integration
```python
def add_template_engine(self, engine: TemplateEngine) -> None:
    """Add custom template engine (Jinja2, etc.)"""
    self.template_engine = engine

def render_template(self, template_name: str, **kwargs) -> str:
    """Render template using custom engine"""
    return self.template_engine.render(template_name, **kwargs)
```

## Critical Implementation Details

### Thread Safety
```python
# Context is NOT thread-safe by design
# Each generation session gets its own context instance
def create_context() -> RenderContext:
    return RenderContext(project_root, output_package)
```

### Memory Management
```python
# Clear context between files to prevent memory leaks
def emit_file(self, file_path: Path, generator_func: Callable) -> None:
    self.context.clear_imports()

    # Generate code
    code = generator_func(self.context)

    # Write file with imports
    imports = self.context.get_imports()
    final_code = self.combine_imports_and_code(imports, code)

    self.file_manager.write_file(file_path, final_code)
```

### Error Context
```python
# Always provide context in error messages
def add_import_with_context(self, import_statement: str, file_context: str) -> None:
    try:
        self.import_collector.add_import(import_statement)
    except Exception as e:
        raise ImportError(f"Failed to add import '{import_statement}' in {file_context}: {e}")
```

## Common Pitfalls

1. **Import Leakage**: Not clearing imports between files
2. **Path Confusion**: Using absolute paths instead of relative
3. **State Mutation**: Modifying context from multiple threads
4. **Memory Leaks**: Not cleaning up context after generation

## Best Practices

1. **One Context Per Session**: Create new context for each generation
2. **Clear Between Files**: Always clear imports between file generations
3. **Use Relative Imports**: Convert absolute imports to relative
4. **Error Context**: Include file/operation context in errors

pyopenapi_gen/context/import_collector.py
CHANGED
@@ -274,10 +274,10 @@ class ImportCollector:
         is_core_module_to_be_absolute = True
 
         if is_core_module_to_be_absolute:
-            import_statement = f"from {module_name} import {
+            import_statement = f"from {module_name} import {', '.join(names)}"
             standard_import_lines.append(import_statement)
         elif is_stdlib_module:
-            import_statement = f"from {module_name} import {
+            import_statement = f"from {module_name} import {', '.join(names)}"
             standard_import_lines.append(import_statement)
         elif (
             current_module_dot_path_to_use
@@ -286,13 +286,13 @@ class ImportCollector:
         ):
             try:
                 relative_module = make_relative_import(current_module_dot_path_to_use, module_name)
-                import_statement = f"from {relative_module} import {
+                import_statement = f"from {relative_module} import {', '.join(names)}"
                 standard_import_lines.append(import_statement)
             except ValueError as e:
-                import_statement = f"from {module_name} import {
+                import_statement = f"from {module_name} import {', '.join(names)}"
                 standard_import_lines.append(import_statement)
         else:
-            import_statement = f"from {module_name} import {
+            import_statement = f"from {module_name} import {', '.join(names)}"
             standard_import_lines.append(import_statement)
 
         plain_import_lines: List[str] = []
@@ -331,7 +331,7 @@ class ImportCollector:
 
         for module in stdlib_modules:
             names = sorted(self.imports[module])
-            statements.append(f"from {module} import {
+            statements.append(f"from {module} import {', '.join(names)}")
 
         # Then third-party and app imports
         other_modules = sorted([m for m in self.imports.keys() if not _is_stdlib(m)])
@@ -341,7 +341,7 @@ class ImportCollector:
 
         for module in other_modules:
             names = sorted(self.imports[module])
-            statements.append(f"from {module} import {
+            statements.append(f"from {module} import {', '.join(names)}")
 
         # Then plain imports
         if self.plain_imports:
@@ -357,7 +357,7 @@ class ImportCollector:
 
         for module in sorted(self.relative_imports.keys()):
             names = sorted(self.relative_imports[module])
-            statements.append(f"from {module} import {
+            statements.append(f"from {module} import {', '.join(names)}")
 
         return "\n".join(statements)
 
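Each `+` line above builds a consolidated from-import with a single f-string; a standalone illustration with made-up values:

```python
module_name = "typing"
names = sorted({"List", "Dict"})  # the collector sorts names before this point

# Same expression as on the '+' lines above.
import_statement = f"from {module_name} import {', '.join(names)}"
print(import_statement)  # from typing import Dict, List
```
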
pyopenapi_gen/core/CLAUDE.md
ADDED
@@ -0,0 +1,224 @@
# core/ - OpenAPI Parsing and Runtime Components

## Why This Folder?
Raw OpenAPI spec → Intermediate Representation (IR) transformation. Contains parsing logic, cycle detection, and runtime components that get copied to generated clients.

## Key Dependencies
- **Input**: Raw OpenAPI spec dicts from YAML/JSON
- **Output**: `IRSpec`, `IRSchema`, `IRResponse`, `IROperation` objects
- **Runtime**: Components copied to generated clients (`auth/`, `exceptions.py`, `http_transport.py`)

## Essential Architecture

### 1. Parsing Pipeline
```mermaid
graph LR
    A[Raw OpenAPI] --> B[loader/] --> C[parsing/] --> D[IRSpec]
    B --> E[schemas/extractor.py]
    C --> F[unified_cycle_detection.py]
    C --> G[transformers/]
```

### 2. Cycle Detection State Machine
```python
# parsing/unified_cycle_detection.py
@dataclass
class CycleInfo:
    type: CycleType      # STRUCTURAL, SELF_REF, DEPTH_LIMIT
    action: CycleAction  # PLACEHOLDER, FORWARD_REF, DEPTH_CUTOFF
    depth: int

# Usage in parsing
if context.detect_cycle(schema_name, current_depth):
    return create_cycle_placeholder(schema_name, cycle_info)
```

## Critical Components

### parsing/schema_parser.py
**Purpose**: Main schema parsing with cycle detection
```python
def parse_schema(schema_data: Dict[str, Any], context: ParsingContext) -> IRSchema:
    # 1. Cycle detection
    # 2. Keyword parsing (allOf, oneOf, anyOf)
    # 3. Transformer application
    # 4. IR object creation
```

### parsing/transformers/
**Purpose**: Modify parsed schemas before IR creation
- **inline_enum_extractor.py**: Extract inline enums to global schemas
- **inline_object_promoter.py**: Promote inline objects to named schemas

### loader/
**Purpose**: Load and validate OpenAPI specs
```python
# loader/loader.py
def load_spec(spec_path: str) -> IRSpec:
    # 1. Load YAML/JSON
    # 2. Validate OpenAPI format
    # 3. Parse operations, schemas, responses
    # 4. Build IRSpec
```

### Runtime Components (Copied to Clients)

#### auth/
```python
# auth/base.py - Base authentication classes
class AuthBase(ABC):
    async def apply_auth(self, request: httpx.Request) -> httpx.Request:
        # Modify request with auth info

# auth/plugins.py - Concrete implementations
class BearerAuth(AuthBase):
    def __init__(self, token: str): ...
```

#### exceptions.py
```python
# Exception hierarchy for generated clients
class ClientError(Exception): ...
class ServerError(ClientError): ...
class ValidationError(ClientError): ...
```

#### http_transport.py
```python
# HTTP client abstraction
class HTTPTransport:
    def __init__(self, base_url: str, auth: Optional[AuthBase] = None):
        self.client = httpx.AsyncClient(base_url=base_url)
```

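An illustrative composition of these runtime pieces; `BearerAuth` and `HTTPTransport` are the sketches above, and the URL and token values are made up:

```python
transport = HTTPTransport(
    base_url="https://api.example.com",
    auth=BearerAuth(token="example-token"),
)

async def get_user(user_id: str) -> dict:
    # Generated endpoint methods route through transport.client (an httpx.AsyncClient).
    response = await transport.client.get(f"/users/{user_id}")
    response.raise_for_status()
    return response.json()
```
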
## Environment Variables
```python
# parsing/unified_cycle_detection.py
PYOPENAPI_MAX_DEPTH = int(os.getenv("PYOPENAPI_MAX_DEPTH", "150"))
PYOPENAPI_MAX_CYCLES = int(os.getenv("PYOPENAPI_MAX_CYCLES", "0"))
```

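A minimal sketch of overriding these limits for a deeply nested spec; that the variables are read when the parsing modules are imported is an assumption here:

```python
import os

# Assumed workflow: set before the generator's parsing modules are imported/run.
os.environ["PYOPENAPI_MAX_DEPTH"] = "300"
os.environ["PYOPENAPI_MAX_CYCLES"] = "0"
```
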
## Dependencies on Other Systems

### From types/
- Schema → Python type conversion after parsing
- Type resolution for complex compositions

### From context/
- `ParsingContext` for cycle detection state
- Import management during parsing

### To visit/
- Provides `IRSpec` for visitor pattern traversal
- IR objects consumed by code generators

## Common Parsing Patterns

### 1. Keyword Composition
```python
# parsing/keywords/all_of_parser.py
def parse_all_of(all_of_items: List[Dict], context: ParsingContext) -> IRSchema:
    # Merge all schemas into single IR object
    merged_schema = IRSchema(type="object")
    for item in all_of_items:
        parsed_item = parse_schema(item, context)
        merged_schema = merge_schemas(merged_schema, parsed_item)
    return merged_schema
```

### 2. Reference Resolution
```python
# parsing/common/ref_resolution/resolve_schema_ref.py
def resolve_schema_ref(ref: str, context: ParsingContext) -> IRSchema:
    if ref.startswith("#/components/schemas/"):
        schema_name = ref.split("/")[-1]
        return context.get_schema(schema_name)
    raise ValueError(f"Unsupported ref: {ref}")
```

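For illustration, the string handling of that helper in isolation; the `$ref` value is a made-up example:

```python
ref = "#/components/schemas/User"
assert ref.startswith("#/components/schemas/")
schema_name = ref.split("/")[-1]
print(schema_name)  # "User", the key later looked up via context.get_schema(...)
```
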
### 3. Cycle Detection
```python
# parsing/unified_cycle_detection.py
def detect_cycle(schema_name: str, current_depth: int, context: ParsingContext) -> Optional[CycleInfo]:
    if current_depth > PYOPENAPI_MAX_DEPTH:
        return CycleInfo(CycleType.DEPTH_LIMIT, CycleAction.DEPTH_CUTOFF, current_depth)

    if schema_name in context.parsing_stack:
        return CycleInfo(CycleType.STRUCTURAL, CycleAction.PLACEHOLDER, current_depth)

    return None
```

## Testing Requirements

### Parser Tests
```python
def test_parse_schema__all_of_composition__merges_correctly():
    # Test keyword parsers with real OpenAPI data
    schema_data = {
        "allOf": [
            {"type": "object", "properties": {"name": {"type": "string"}}},
            {"type": "object", "properties": {"age": {"type": "integer"}}}
        ]
    }
    # Test parsing result
```

### Cycle Detection Tests
```python
def test_unified_cycle_detection__structural_cycle__creates_placeholder():
    # Test cycle detection with circular references
    context = ParsingContext()
    # Create circular reference scenario
    # Verify placeholder creation
```

## Extension Points

### Adding New Keywords
```python
# parsing/keywords/new_keyword_parser.py
def parse_new_keyword(keyword_data: Any, context: ParsingContext) -> IRSchema:
    # Custom keyword parsing logic
    pass
```

### Adding New Transformers
```python
# parsing/transformers/new_transformer.py
def transform_schema(schema: IRSchema, context: ParsingContext) -> IRSchema:
    # Custom transformation logic
    return modified_schema
```

## Critical Implementation Details

### IR Object Creation
```python
# Always use IRSchema constructor with all required fields
schema = IRSchema(
    name=schema_name,
    type=schema_type,
    properties=properties,
    required=required_fields,
    description=description,
    enum=enum_values,
    is_nullable=is_nullable
)
```

### Context State Management
```python
# parsing/context.py
class ParsingContext:
    def __init__(self):
        self.parsed_schemas: Dict[str, IRSchema] = {}
        self.parsing_stack: List[str] = []  # For cycle detection
        self.forward_refs: Set[str] = set()

    def enter_schema(self, schema_name: str):
        self.parsing_stack.append(schema_name)

    def exit_schema(self, schema_name: str):
        self.parsing_stack.remove(schema_name)
```

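A minimal sketch of how the pieces above interact while walking one schema; this wrapper is hypothetical and only reuses the names sketched in this file:

```python
def _parse_with_cycle_guard(schema_name: str, node: dict, context: ParsingContext, depth: int) -> IRSchema:
    # Hypothetical helper: consult cycle detection, then track the schema on the stack.
    cycle_info = detect_cycle(schema_name, depth, context)
    if cycle_info is not None:
        return create_cycle_placeholder(schema_name, cycle_info)

    context.enter_schema(schema_name)
    try:
        return parse_schema(node, context)
    finally:
        context.exit_schema(schema_name)  # always unwind the parsing stack
```
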
pyopenapi_gen/core/loader/operations/parser.py
CHANGED
@@ -120,7 +120,7 @@ def parse_operations(
         # Handle direct schema references in responses
         # Convert schema reference to a response with content
         resp_node_resolved = {
-            "description": f"Response with {rn_node[
+            "description": f"Response with {rn_node['$ref'].split('/')[-1]} schema",
             "content": {"application/json": {"schema": {"$ref": rn_node["$ref"]}}},
         }
     else:
pyopenapi_gen/core/parsing/cycle_helpers.py
CHANGED
@@ -92,7 +92,7 @@ def _handle_max_depth_exceeded(original_name: Optional[str], context: ParsingCon
 
     # path_prefix = schema_ir_name_attr if schema_ir_name_attr else "<anonymous_schema>"
     # cycle_path_for_desc = f"{path_prefix} -> MAX_DEPTH_EXCEEDED"
-    description = f"[Maximum recursion depth ({max_depth}) exceeded for '{original_name or
+    description = f"[Maximum recursion depth ({max_depth}) exceeded for '{original_name or 'anonymous'}']"
     logger.warning(description)
 
     placeholder_schema = IRSchema(
pyopenapi_gen/core/parsing/keywords/properties_parser.py
CHANGED
@@ -81,15 +81,15 @@ def _parse_properties(
         if promoted_ir is not None:
             properties_map[prop_key] = promoted_ir
             logger.debug(
-                f"Added promoted '{prop_key}' (name: {getattr(promoted_ir,
+                f"Added promoted '{prop_key}' (name: {getattr(promoted_ir, 'name', 'N/A')}) "
                 f"to properties_map for '{parent_schema_name}'"
             )
         else:
             properties_map[prop_key] = prop_schema_ir
             logger.debug(
-                f"Added original '{prop_key}' (name: {getattr(prop_schema_ir,
-                f"type: {getattr(prop_schema_ir,
-                f"circular: {getattr(prop_schema_ir,
+                f"Added original '{prop_key}' (name: {getattr(prop_schema_ir, 'name', 'N/A')}, "
+                f"type: {getattr(prop_schema_ir, 'type', 'N/A')}, "
+                f"circular: {getattr(prop_schema_ir, '_is_circular_ref', 'N/A')}) "
                 f"to properties_map for '{parent_schema_name}'"
             )
 
pyopenapi_gen/core/parsing/schema_parser.py
CHANGED
@@ -42,7 +42,7 @@ def _resolve_ref(
     if not (ref_name_parts and ref_name_parts[-1]):
         logger.warning(
             f"Malformed $ref path '{ref_path_str}' encountered while parsing "
-            f"parent '{parent_schema_name or
+            f"parent '{parent_schema_name or 'anonymous'}'."
         )
         return IRSchema(
             name=None,  # Anonymous placeholder for a bad ref
@@ -60,7 +60,7 @@ def _resolve_ref(
     ref_node = context.raw_spec_schemas.get(ref_name)
     if ref_node is None:
         logger.warning(
-            f"Cannot resolve $ref '{ref_path_str}' for parent '{parent_schema_name or
+            f"Cannot resolve $ref '{ref_path_str}' for parent '{parent_schema_name or 'anonymous'}'. "
             f"Target '{ref_name}' not in raw_spec_schemas. Returning placeholder."
         )
         return IRSchema(
@@ -142,7 +142,7 @@ def _parse_properties(
     for prop_name, prop_schema_node in properties_node.items():
         if not isinstance(prop_name, str) or not prop_name:
             logger.warning(
-                f"Skipping property with invalid name '{prop_name}' in schema '{parent_schema_name or
+                f"Skipping property with invalid name '{prop_name}' in schema '{parent_schema_name or 'anonymous'}'."
             )
             continue
 
@@ -379,7 +379,7 @@ def _parse_schema(
 
     assert isinstance(
         schema_node, Mapping
-    ), f"Schema node for '{schema_name or
+    ), f"Schema node for '{schema_name or 'anonymous'}' must be a Mapping (e.g., dict), got {type(schema_node)}"
 
     # If the current schema_node itself is a $ref, resolve it.
     if "$ref" in schema_node:
pyopenapi_gen/core/parsing/transformers/inline_enum_extractor.py
CHANGED
@@ -179,7 +179,7 @@ def _process_standalone_inline_enum(
 
     logger.debug(
         f"STANDALONE_ENUM_CHECK: Processing node for "
-        f"'{schema_name or schema_obj.name or
+        f"'{schema_name or schema_obj.name or 'anonymous_schema'}' for direct enum properties."
    )
 
     # Ensure basic enum properties are on schema_obj if not already there from initial _parse_schema pass
pyopenapi_gen/core/writers/python_construct_renderer.py
CHANGED
@@ -262,7 +262,7 @@ class PythonConstructRenderer:
 
         # Sort mappings for consistent output
         for api_field, python_field in sorted(field_mappings.items()):
-            writer.write_line(f"
+            writer.write_line(f'"{api_field}": "{python_field}",')
 
         writer.dedent()
         writer.write_line("}")
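The new line above writes one entry of the field-mapping dict per API field; a standalone sketch of the resulting output for made-up fields:

```python
field_mappings = {"userId": "user_id", "createdAt": "created_at"}

lines = ["{"]
for api_field, python_field in sorted(field_mappings.items()):
    lines.append(f'    "{api_field}": "{python_field}",')  # same f-string as the '+' line
lines.append("}")
print("\n".join(lines))
# {
#     "createdAt": "created_at",
#     "userId": "user_id",
# }
```
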
@@ -302,7 +302,7 @@ class PythonConstructRenderer:
         ```
         """
         writer = CodeWriter()
-        bases = f"({
+        bases = f"({', '.join(base_classes)})" if base_classes else ""
         writer.write_line(f"class {class_name}{bases}:")
         writer.indent()
         has_content = False