pyopenapi_gen-2.7.2-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pyopenapi_gen/__init__.py +224 -0
- pyopenapi_gen/__main__.py +6 -0
- pyopenapi_gen/cli.py +62 -0
- pyopenapi_gen/context/CLAUDE.md +284 -0
- pyopenapi_gen/context/file_manager.py +52 -0
- pyopenapi_gen/context/import_collector.py +382 -0
- pyopenapi_gen/context/render_context.py +726 -0
- pyopenapi_gen/core/CLAUDE.md +224 -0
- pyopenapi_gen/core/__init__.py +0 -0
- pyopenapi_gen/core/auth/base.py +22 -0
- pyopenapi_gen/core/auth/plugins.py +89 -0
- pyopenapi_gen/core/cattrs_converter.py +810 -0
- pyopenapi_gen/core/exceptions.py +20 -0
- pyopenapi_gen/core/http_status_codes.py +218 -0
- pyopenapi_gen/core/http_transport.py +222 -0
- pyopenapi_gen/core/loader/__init__.py +12 -0
- pyopenapi_gen/core/loader/loader.py +174 -0
- pyopenapi_gen/core/loader/operations/__init__.py +12 -0
- pyopenapi_gen/core/loader/operations/parser.py +161 -0
- pyopenapi_gen/core/loader/operations/post_processor.py +62 -0
- pyopenapi_gen/core/loader/operations/request_body.py +90 -0
- pyopenapi_gen/core/loader/parameters/__init__.py +10 -0
- pyopenapi_gen/core/loader/parameters/parser.py +186 -0
- pyopenapi_gen/core/loader/responses/__init__.py +10 -0
- pyopenapi_gen/core/loader/responses/parser.py +111 -0
- pyopenapi_gen/core/loader/schemas/__init__.py +11 -0
- pyopenapi_gen/core/loader/schemas/extractor.py +275 -0
- pyopenapi_gen/core/pagination.py +64 -0
- pyopenapi_gen/core/parsing/__init__.py +13 -0
- pyopenapi_gen/core/parsing/common/__init__.py +1 -0
- pyopenapi_gen/core/parsing/common/ref_resolution/__init__.py +9 -0
- pyopenapi_gen/core/parsing/common/ref_resolution/helpers/__init__.py +0 -0
- pyopenapi_gen/core/parsing/common/ref_resolution/helpers/cyclic_properties.py +66 -0
- pyopenapi_gen/core/parsing/common/ref_resolution/helpers/direct_cycle.py +33 -0
- pyopenapi_gen/core/parsing/common/ref_resolution/helpers/existing_schema.py +22 -0
- pyopenapi_gen/core/parsing/common/ref_resolution/helpers/list_response.py +54 -0
- pyopenapi_gen/core/parsing/common/ref_resolution/helpers/missing_ref.py +52 -0
- pyopenapi_gen/core/parsing/common/ref_resolution/helpers/new_schema.py +50 -0
- pyopenapi_gen/core/parsing/common/ref_resolution/helpers/stripped_suffix.py +51 -0
- pyopenapi_gen/core/parsing/common/ref_resolution/resolve_schema_ref.py +86 -0
- pyopenapi_gen/core/parsing/common/type_parser.py +73 -0
- pyopenapi_gen/core/parsing/context.py +187 -0
- pyopenapi_gen/core/parsing/cycle_helpers.py +126 -0
- pyopenapi_gen/core/parsing/keywords/__init__.py +1 -0
- pyopenapi_gen/core/parsing/keywords/all_of_parser.py +81 -0
- pyopenapi_gen/core/parsing/keywords/any_of_parser.py +84 -0
- pyopenapi_gen/core/parsing/keywords/array_items_parser.py +72 -0
- pyopenapi_gen/core/parsing/keywords/one_of_parser.py +77 -0
- pyopenapi_gen/core/parsing/keywords/properties_parser.py +98 -0
- pyopenapi_gen/core/parsing/schema_finalizer.py +169 -0
- pyopenapi_gen/core/parsing/schema_parser.py +804 -0
- pyopenapi_gen/core/parsing/transformers/__init__.py +0 -0
- pyopenapi_gen/core/parsing/transformers/inline_enum_extractor.py +285 -0
- pyopenapi_gen/core/parsing/transformers/inline_object_promoter.py +120 -0
- pyopenapi_gen/core/parsing/unified_cycle_detection.py +293 -0
- pyopenapi_gen/core/postprocess_manager.py +260 -0
- pyopenapi_gen/core/spec_fetcher.py +148 -0
- pyopenapi_gen/core/streaming_helpers.py +84 -0
- pyopenapi_gen/core/telemetry.py +69 -0
- pyopenapi_gen/core/utils.py +456 -0
- pyopenapi_gen/core/warning_collector.py +83 -0
- pyopenapi_gen/core/writers/code_writer.py +135 -0
- pyopenapi_gen/core/writers/documentation_writer.py +222 -0
- pyopenapi_gen/core/writers/line_writer.py +217 -0
- pyopenapi_gen/core/writers/python_construct_renderer.py +321 -0
- pyopenapi_gen/core_package_template/README.md +21 -0
- pyopenapi_gen/emit/models_emitter.py +143 -0
- pyopenapi_gen/emitters/CLAUDE.md +286 -0
- pyopenapi_gen/emitters/client_emitter.py +51 -0
- pyopenapi_gen/emitters/core_emitter.py +181 -0
- pyopenapi_gen/emitters/docs_emitter.py +44 -0
- pyopenapi_gen/emitters/endpoints_emitter.py +247 -0
- pyopenapi_gen/emitters/exceptions_emitter.py +187 -0
- pyopenapi_gen/emitters/mocks_emitter.py +185 -0
- pyopenapi_gen/emitters/models_emitter.py +426 -0
- pyopenapi_gen/generator/CLAUDE.md +352 -0
- pyopenapi_gen/generator/client_generator.py +567 -0
- pyopenapi_gen/generator/exceptions.py +7 -0
- pyopenapi_gen/helpers/CLAUDE.md +325 -0
- pyopenapi_gen/helpers/__init__.py +1 -0
- pyopenapi_gen/helpers/endpoint_utils.py +532 -0
- pyopenapi_gen/helpers/type_cleaner.py +334 -0
- pyopenapi_gen/helpers/type_helper.py +112 -0
- pyopenapi_gen/helpers/type_resolution/__init__.py +1 -0
- pyopenapi_gen/helpers/type_resolution/array_resolver.py +57 -0
- pyopenapi_gen/helpers/type_resolution/composition_resolver.py +79 -0
- pyopenapi_gen/helpers/type_resolution/finalizer.py +105 -0
- pyopenapi_gen/helpers/type_resolution/named_resolver.py +172 -0
- pyopenapi_gen/helpers/type_resolution/object_resolver.py +216 -0
- pyopenapi_gen/helpers/type_resolution/primitive_resolver.py +109 -0
- pyopenapi_gen/helpers/type_resolution/resolver.py +47 -0
- pyopenapi_gen/helpers/url_utils.py +14 -0
- pyopenapi_gen/http_types.py +20 -0
- pyopenapi_gen/ir.py +165 -0
- pyopenapi_gen/py.typed +1 -0
- pyopenapi_gen/types/CLAUDE.md +140 -0
- pyopenapi_gen/types/__init__.py +11 -0
- pyopenapi_gen/types/contracts/__init__.py +13 -0
- pyopenapi_gen/types/contracts/protocols.py +106 -0
- pyopenapi_gen/types/contracts/types.py +28 -0
- pyopenapi_gen/types/resolvers/__init__.py +7 -0
- pyopenapi_gen/types/resolvers/reference_resolver.py +71 -0
- pyopenapi_gen/types/resolvers/response_resolver.py +177 -0
- pyopenapi_gen/types/resolvers/schema_resolver.py +498 -0
- pyopenapi_gen/types/services/__init__.py +5 -0
- pyopenapi_gen/types/services/type_service.py +165 -0
- pyopenapi_gen/types/strategies/__init__.py +5 -0
- pyopenapi_gen/types/strategies/response_strategy.py +310 -0
- pyopenapi_gen/visit/CLAUDE.md +272 -0
- pyopenapi_gen/visit/client_visitor.py +477 -0
- pyopenapi_gen/visit/docs_visitor.py +38 -0
- pyopenapi_gen/visit/endpoint/__init__.py +1 -0
- pyopenapi_gen/visit/endpoint/endpoint_visitor.py +292 -0
- pyopenapi_gen/visit/endpoint/generators/__init__.py +1 -0
- pyopenapi_gen/visit/endpoint/generators/docstring_generator.py +123 -0
- pyopenapi_gen/visit/endpoint/generators/endpoint_method_generator.py +222 -0
- pyopenapi_gen/visit/endpoint/generators/mock_generator.py +140 -0
- pyopenapi_gen/visit/endpoint/generators/overload_generator.py +252 -0
- pyopenapi_gen/visit/endpoint/generators/request_generator.py +103 -0
- pyopenapi_gen/visit/endpoint/generators/response_handler_generator.py +705 -0
- pyopenapi_gen/visit/endpoint/generators/signature_generator.py +83 -0
- pyopenapi_gen/visit/endpoint/generators/url_args_generator.py +207 -0
- pyopenapi_gen/visit/endpoint/processors/__init__.py +1 -0
- pyopenapi_gen/visit/endpoint/processors/import_analyzer.py +78 -0
- pyopenapi_gen/visit/endpoint/processors/parameter_processor.py +171 -0
- pyopenapi_gen/visit/exception_visitor.py +90 -0
- pyopenapi_gen/visit/model/__init__.py +0 -0
- pyopenapi_gen/visit/model/alias_generator.py +93 -0
- pyopenapi_gen/visit/model/dataclass_generator.py +553 -0
- pyopenapi_gen/visit/model/enum_generator.py +212 -0
- pyopenapi_gen/visit/model/model_visitor.py +198 -0
- pyopenapi_gen/visit/visitor.py +97 -0
- pyopenapi_gen-2.7.2.dist-info/METADATA +1169 -0
- pyopenapi_gen-2.7.2.dist-info/RECORD +137 -0
- pyopenapi_gen-2.7.2.dist-info/WHEEL +4 -0
- pyopenapi_gen-2.7.2.dist-info/entry_points.txt +2 -0
- pyopenapi_gen-2.7.2.dist-info/licenses/LICENSE +21 -0

pyopenapi_gen/emitters/endpoints_emitter.py

@@ -0,0 +1,247 @@
import logging
from pathlib import Path
from typing import List, Tuple

from pyopenapi_gen import IROperation, IRParameter, IRRequestBody
from pyopenapi_gen.context.render_context import RenderContext
from pyopenapi_gen.visit.endpoint.endpoint_visitor import EndpointVisitor

from ..core.utils import Formatter, NameSanitizer

logger = logging.getLogger(__name__)

# Basic OpenAPI schema to Python type mapping for parameters
PARAM_TYPE_MAPPING = {
    "integer": "int",
    "number": "float",
    "boolean": "bool",
    "string": "str",
    "array": "List",
    "object": "dict[str, Any]",
}
# Format-specific overrides
PARAM_FORMAT_MAPPING = {
    "int32": "int",
    "int64": "int",
    "float": "float",
    "double": "float",
    "byte": "str",
    "binary": "bytes",
    "date": "date",
    "date-time": "datetime",
}

# Default tag for untagged operations
DEFAULT_TAG = "default"


def schema_to_type(schema: IRParameter) -> str:
    """Convert an IRParameter's schema to a Python type string."""
    s = schema.schema  # s is an IRSchema instance
    py_type: str = "Any"  # Default base type

    # 1. Determine base type (without Optional wrapper yet)
    # Format-specific override has highest precedence for base type determination
    if s.format and s.format in PARAM_FORMAT_MAPPING:
        py_type = PARAM_FORMAT_MAPPING[s.format]
    # Array handling
    elif s.type == "array" and s.items:
        # For array items, we recursively call schema_to_type.
        # The nullability of the item_type itself (e.g. List[int | None])
        # will be handled by the recursive call based on s.items.is_nullable.
        item_schema_as_param = IRParameter(name="_item", param_in="_internal", required=False, schema=s.items)
        item_type_str = schema_to_type(item_schema_as_param)
        py_type = f"List[{item_type_str}]"
    # Default mapping based on s.type (primary type)
    elif s.type and s.type in PARAM_TYPE_MAPPING:
        py_type = PARAM_TYPE_MAPPING[s.type]
    # Fallback if type is None or not in mappings (and not format override/array)
    # If s.type is None and there was no format override, it defaults to "Any".
    # If s.type is something not recognized, it also defaults to "Any".
    elif not s.type and not s.format:  # Type is None, no format override
        py_type = "Any"
    elif s.type:  # Type is some string not in PARAM_TYPE_MAPPING and not an array handled above
        # This could be a reference to a model. For now, schema_to_type is simple and returns Any.
        # A more sophisticated version would return the schema name for model visitor to handle.
        # However, based on existing PARAM_TYPE_MAPPING, unknown types become "Any".
        py_type = "Any"
    # If py_type is still "Any" here, it means none of the above conditions strongly set a type.

    # 2. Apply nullability based on IRSchema's is_nullable field
    # This s.is_nullable should be the source of truth from the IR after parsing.
    if s.is_nullable:
        # Ensure "Any" also gets wrapped, e.g. Any | None
        py_type = f"{py_type} | None"

    return py_type


def _get_request_body_type(body: IRRequestBody) -> str:
    """Determine the Python type for a request body schema."""
    for mt, sch in body.content.items():
        if "json" in mt.lower():
            return schema_to_type(IRParameter(name="body", param_in="body", required=body.required, schema=sch))
    # Fallback to generic dict
    return "dict[str, Any]"


def _deduplicate_tag_clients(client_classes: List[Tuple[str, str]]) -> List[Tuple[str, str]]:
    """
    Deduplicate client class/module pairs by canonical module/class name.
    Returns a list of unique (class_name, module_name) pairs.
    """
    seen = set()
    unique = []
    for cls, mod in client_classes:
        key = (cls.lower(), mod.lower())
        if key not in seen:
            seen.add(key)
            unique.append((cls, mod))
    return unique


class EndpointsEmitter:
    """Generates endpoint modules organized by tag from IRSpec using the visitor/context architecture."""

    def __init__(self, context: RenderContext) -> None:
        self.context = context
        self.formatter = Formatter()
        self.visitor: EndpointVisitor | None = None

    def _deduplicate_operation_ids_globally(self, operations: List[IROperation]) -> None:
        """
        Ensures all operations have unique method names globally across all tags.

        This prevents the bug where operations with multiple tags share the same
        IROperation object reference, causing _deduplicate_operation_ids() to
        modify the same object multiple times and accumulate _2_2 suffixes.

        Args:
            operations: List of all operations across all tags.
        """
        seen_methods: dict[str, int] = {}
        for op in operations:
            method_name = NameSanitizer.sanitize_method_name(op.operation_id)
            if method_name in seen_methods:
                seen_methods[method_name] += 1
                new_op_id = f"{op.operation_id}_{seen_methods[method_name]}"
                op.operation_id = new_op_id
            else:
                seen_methods[method_name] = 1

    def emit(self, operations: List[IROperation], output_dir_str: str) -> List[str]:
        """Render endpoint client files per tag under <output_dir>/endpoints.
        Returns a list of generated file paths."""
        output_dir = Path(output_dir_str)
        endpoints_dir = output_dir / "endpoints"

        self.context.file_manager.ensure_dir(str(endpoints_dir))

        # Manage __init__.py and py.typed files
        common_files_to_ensure = [
            (endpoints_dir / "__init__.py", ""),
            (output_dir / "__init__.py", ""),  # Ensure root client package __init__.py
            (endpoints_dir / "py.typed", ""),
        ]
        for file_path, content in common_files_to_ensure:
            if not file_path.exists():
                self.context.file_manager.write_file(str(file_path), content)

        # Ensure parsed_schemas is at least an empty dict if None,
        # as EndpointVisitor expects dict[str, IRSchema]
        current_parsed_schemas = self.context.parsed_schemas
        if current_parsed_schemas is None:
            logger.warning(
                "[EndpointsEmitter] RenderContext.parsed_schemas was None. "
                "Defaulting to empty dict for EndpointVisitor."
            )
            current_parsed_schemas = {}  # Default to empty dict if None

        if self.visitor is None:
            self.visitor = EndpointVisitor(current_parsed_schemas)  # Pass the (potentially defaulted) dict

        # Deduplicate operation IDs globally BEFORE tag grouping to prevent
        # multi-tag operations from accumulating _2_2 suffixes
        self._deduplicate_operation_ids_globally(operations)

        tag_key_to_ops: dict[str, List[IROperation]] = {}
        tag_key_to_candidates: dict[str, List[str]] = {}
        for op in operations:
            tags = op.tags or [DEFAULT_TAG]
            for tag in tags:
                key = NameSanitizer.normalize_tag_key(tag)
                tag_key_to_ops.setdefault(key, []).append(op)
                tag_key_to_candidates.setdefault(key, []).append(tag)

        def tag_score(t: str) -> tuple[bool, int, int, str]:
            import re

            is_pascal = bool(re.search(r"[a-z][A-Z]", t)) or bool(re.search(r"[A-Z]{2,}", t))
            words = re.findall(r"[A-Z]?[a-z]+|[A-Z]+(?![a-z])|[0-9]+", t)
            words += re.split(r"[_-]+", t)
            word_count = len([w for w in words if w])
            upper = sum(1 for c in t if c.isupper())
            return (is_pascal, word_count, upper, t)

        tag_map: dict[str, str] = {}
        for key, candidates in tag_key_to_candidates.items():
            best_tag_for_key = DEFAULT_TAG  # Default if no candidates somehow
            if candidates:
                best_tag_for_key = max(candidates, key=tag_score)
            tag_map[key] = best_tag_for_key

        generated_files: List[str] = []
        client_classes: List[Tuple[str, str]] = []

        for key, ops_for_tag in tag_key_to_ops.items():
            canonical_tag_name = tag_map[key]
            module_name = NameSanitizer.sanitize_module_name(canonical_tag_name)
            class_name = NameSanitizer.sanitize_class_name(canonical_tag_name) + "Client"
            protocol_name = f"{class_name}Protocol"
            file_path = endpoints_dir / f"{module_name}.py"

            # This will set current_file and reset+reinit import_collector's context
            self.context.set_current_file(str(file_path))

            # Deduplication now done globally before tag grouping (see above)

            # EndpointVisitor must exist here due to check above
            if self.visitor is None:
                raise RuntimeError("EndpointVisitor not initialized")
            methods = [self.visitor.visit(op, self.context) for op in ops_for_tag]
            # Pass operations to emit_endpoint_client_class for Protocol generation
            class_content = self.visitor.emit_endpoint_client_class(
                canonical_tag_name, methods, self.context, operations=ops_for_tag
            )

            imports = self.context.render_imports()
            file_content = imports + "\n\n" + class_content
            self.context.file_manager.write_file(str(file_path), file_content)
            # Store both class and protocol for __init__.py generation
            client_classes.append((class_name, module_name))
            generated_files.append(str(file_path))

        unique_clients = _deduplicate_tag_clients(client_classes)
        init_lines = []
        if unique_clients:
            # Export both implementation classes and Protocol classes
            all_list_items = []
            for cls, _ in unique_clients:
                protocol_name = f"{cls}Protocol"
                all_list_items.append(f'"{cls}"')
                all_list_items.append(f'"{protocol_name}"')

            all_list_items = sorted(all_list_items)
            init_lines.append(f"__all__ = [{', '.join(all_list_items)}]")

            # Import both implementation and Protocol from each module
            for cls, mod in sorted(unique_clients):
                protocol_name = f"{cls}Protocol"
                init_lines.append(f"from .{mod} import {cls}, {protocol_name}")

        endpoints_init_path = endpoints_dir / "__init__.py"
        self.context.file_manager.write_file(str(endpoints_init_path), "\n".join(init_lines) + "\n")
        if str(endpoints_init_path) not in generated_files:
            generated_files.append(str(endpoints_init_path))

        return generated_files
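
Note: the tag_score heuristic above decides which spelling of a tag becomes the canonical class/module name when a spec mixes casings of the same tag. A minimal standalone sketch that reuses the same scoring on hypothetical candidate tags (not taken from any real spec):

import re


def tag_score(t: str) -> tuple[bool, int, int, str]:
    # Same scoring as in EndpointsEmitter.emit: prefer PascalCase spellings,
    # then more words, then more uppercase letters, tag string as tie-breaker.
    is_pascal = bool(re.search(r"[a-z][A-Z]", t)) or bool(re.search(r"[A-Z]{2,}", t))
    words = re.findall(r"[A-Z]?[a-z]+|[A-Z]+(?![a-z])|[0-9]+", t)
    words += re.split(r"[_-]+", t)
    word_count = len([w for w in words if w])
    upper = sum(1 for c in t if c.isupper())
    return (is_pascal, word_count, upper, t)


# Hypothetical spellings of one logical tag that normalize to the same key:
candidates = ["data_sources", "datasources", "DataSources"]
print(max(candidates, key=tag_score))  # -> "DataSources"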

pyopenapi_gen/emitters/exceptions_emitter.py

@@ -0,0 +1,187 @@
import json
import os
from pathlib import Path

from pyopenapi_gen import IRSpec
from pyopenapi_gen.context.render_context import RenderContext

from ..visit.exception_visitor import ExceptionVisitor


class ExceptionsEmitter:
    """Generates spec-specific exception aliases with multi-client support.

    This emitter handles two scenarios:
    1. **Single client**: Generates exception_aliases.py directly in the core package
    2. **Shared core**: Maintains a registry of all needed exception codes across clients
       and regenerates the complete exception_aliases.py file

    The registry file (.exception_registry.json) tracks which status codes are used by
    which clients, ensuring that when multiple clients share a core package, all required
    exceptions are available.
    """

    def __init__(self, core_package_name: str = "core", overall_project_root: str | None = None) -> None:
        self.visitor = ExceptionVisitor()
        self.core_package_name = core_package_name
        self.overall_project_root = overall_project_root

    def emit(
        self, spec: IRSpec, output_dir: str, client_package_name: str | None = None
    ) -> tuple[list[str], list[str]]:
        """Generate exception aliases for the given spec.

        Args:
            spec: IRSpec containing operations and responses
            output_dir: Directory where exception_aliases.py will be written
            client_package_name: Name of the client package (for registry tracking)

        Returns:
            Tuple of (list of generated file paths, list of exception class names)
        """
        file_path = os.path.join(output_dir, "exception_aliases.py")
        registry_path = os.path.join(output_dir, ".exception_registry.json")

        context = RenderContext(
            package_root_for_generated_code=output_dir,
            core_package_name=self.core_package_name,
            overall_project_root=self.overall_project_root,
        )
        context.set_current_file(file_path)

        # Generate exception classes for this spec
        generated_code, alias_names, status_codes = self.visitor.visit(spec, context)

        # Update registry if we have a client package name (shared core scenario)
        if client_package_name and self._is_shared_core(output_dir):
            all_codes = self._update_registry(registry_path, client_package_name, status_codes)
            # Regenerate with ALL codes from registry
            generated_code, alias_names = self._generate_for_codes(all_codes, context)

        generated_imports = context.render_imports()

        alias_names.sort()

        # Add __all__ list with proper spacing (2 blank lines after last class - Ruff E305)
        if alias_names:
            all_list_str = ", ".join([f'"{name}"' for name in alias_names])
            all_assignment = f"\n\n\n__all__ = [{all_list_str}]\n"
            generated_code += all_assignment

        full_content = f"{generated_imports}\n\n{generated_code}"
        with open(file_path, "w") as f:
            f.write(full_content)

        return [file_path], alias_names

    def _is_shared_core(self, core_dir: str) -> bool:
        """Check if this core package is shared between multiple clients.

        Args:
            core_dir: Path to the core package directory

        Returns:
            True if the core package is outside the immediate client package
        """
        # If overall_project_root is set and different from the core dir's parent,
        # we're in a shared core scenario
        if self.overall_project_root:
            core_path = Path(core_dir).resolve()
            project_root = Path(self.overall_project_root).resolve()
            # Check if there are other client directories at the same level
            parent_dir = core_path.parent
            return parent_dir == project_root or parent_dir.parent == project_root
        return False

    def _update_registry(self, registry_path: str, client_name: str, status_codes: list[int]) -> list[int]:
        """Update the exception registry with this client's status codes.

        Args:
            registry_path: Path to the .exception_registry.json file
            client_name: Name of the client package
            status_codes: List of status codes used by this client

        Returns:
            Complete list of all status codes across all clients
        """
        registry = {}
        if os.path.exists(registry_path):
            with open(registry_path) as f:
                registry = json.load(f)

        # Update this client's codes
        registry[client_name] = sorted(status_codes)

        # Write back to registry
        with open(registry_path, "w") as f:
            json.dump(registry, f, indent=2, sort_keys=True)

        # Return union of all codes
        all_codes = set()
        for codes in registry.values():
            all_codes.update(codes)

        return sorted(all_codes)

    def _generate_for_codes(self, status_codes: list[int], context: RenderContext) -> tuple[str, list[str]]:
        """Generate exception classes for a specific list of status codes.

        Args:
            status_codes: List of HTTP status codes to generate exceptions for
            context: Render context for imports

        Returns:
            Tuple of (generated_code, exception_class_names)
        """
        from ..core.http_status_codes import (
            get_exception_class_name,
            get_status_name,
            is_client_error,
            is_server_error,
        )
        from ..core.writers.python_construct_renderer import PythonConstructRenderer

        renderer = PythonConstructRenderer()
        all_exception_code = []
        generated_alias_names = []

        for code in status_codes:
            # Determine base class
            if is_client_error(code):
                base_class = "ClientError"
            elif is_server_error(code):
                base_class = "ServerError"
            else:
                continue

            # Get human-readable exception class name (e.g., NotFoundError instead of Error404)
            class_name = get_exception_class_name(code)
            generated_alias_names.append(class_name)

            # Get human-readable status name for documentation
            status_name = get_status_name(code)
            docstring = f"HTTP {code} {status_name}.\n\nRaised when the server responds with a {code} status code."

            # Define the __init__ method body
            init_method_body = [
                "def __init__(self, response: Response) -> None:",
                f'    """Initialise {class_name} with the HTTP response.',
                "",  # Empty line without trailing whitespace (Ruff W293)
                "    Args:",
                "        response: The httpx Response object that triggered this exception",
                '    """',
                "    super().__init__(status_code=response.status_code, message=response.text, response=response)",
            ]

            exception_code = renderer.render_class(
                class_name=class_name,
                base_classes=[base_class],
                docstring=docstring,
                body_lines=init_method_body,
                context=context,
            )
            all_exception_code.append(exception_code)

        # Join the generated class strings with 2 blank lines between classes (PEP 8 / Ruff E302)
        final_code = "\n\n\n".join(all_exception_code)
        return final_code, generated_alias_names
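
Note: the shared-core registry described in the ExceptionsEmitter docstring boils down to a JSON file keyed by client package name whose code lists are unioned on every regeneration. A minimal standalone sketch with hypothetical client names and status codes, mirroring what _update_registry stores in .exception_registry.json and what it returns:

import json

# Hypothetical per-client entries, shaped like the .exception_registry.json
# that _update_registry keeps next to exception_aliases.py.
registry = {
    "billing_api": [400, 404, 422],
    "users_api": [401, 404, 500],
}

# The emitter writes the registry back with indent=2 and sorted keys ...
serialized = json.dumps(registry, indent=2, sort_keys=True)

# ... and regenerates exception_aliases.py for the union of all codes,
# so every client sharing the core package finds the exceptions it needs.
all_codes: set[int] = set()
for codes in registry.values():
    all_codes.update(codes)

print(sorted(all_codes))  # [400, 401, 404, 422, 500]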

pyopenapi_gen/emitters/mocks_emitter.py

@@ -0,0 +1,185 @@
"""
Emitter for generating mock helper classes.

This module creates the mocks/ directory structure with mock implementations
for both tag-based endpoint clients and the main API client.
"""

import tempfile
import traceback
from collections import defaultdict
from pathlib import Path

from pyopenapi_gen import IROperation, IRSpec
from pyopenapi_gen.context.render_context import RenderContext
from pyopenapi_gen.core.utils import NameSanitizer

from ..visit.client_visitor import ClientVisitor
from ..visit.endpoint.endpoint_visitor import EndpointVisitor


class MocksEmitter:
    """Generates mock helper classes for testing."""

    def __init__(self, context: RenderContext) -> None:
        self.endpoint_visitor = EndpointVisitor()
        self.client_visitor = ClientVisitor()
        self.context = context

    def emit(self, spec: IRSpec, output_dir_str: str) -> list[str]:
        """
        Generate all mock files in mocks/ directory structure.

        Args:
            spec: IR specification
            output_dir_str: Output directory path

        Returns:
            List of generated file paths
        """
        error_log = Path(tempfile.gettempdir()) / "pyopenapi_gen_mocks_error.log"
        generated_files = []

        try:
            output_dir_abs = Path(output_dir_str)
            mocks_dir = output_dir_abs / "mocks"
            mocks_dir.mkdir(parents=True, exist_ok=True)

            # Group operations by tag
            operations_by_tag = self._group_operations_by_tag(spec)

            # Track tag information for main client generation
            tag_tuples = []

            # Generate mock endpoint classes
            mock_endpoints_dir = mocks_dir / "endpoints"
            mock_endpoints_dir.mkdir(parents=True, exist_ok=True)

            for tag, ops_for_tag in operations_by_tag.items():
                if not ops_for_tag:
                    continue

                canonical_tag_name = tag if tag else "default"
                class_name = NameSanitizer.sanitize_class_name(canonical_tag_name) + "Client"
                module_name = NameSanitizer.sanitize_module_name(canonical_tag_name)

                # Track for main client generation
                tag_tuples.append((canonical_tag_name, class_name, module_name))

                # Generate mock class
                mock_file_path = mock_endpoints_dir / f"mock_{module_name}.py"
                self.context.set_current_file(str(mock_file_path))

                mock_code = self.endpoint_visitor.generate_endpoint_mock_class(
                    canonical_tag_name, ops_for_tag, self.context
                )
                imports_code = self.context.render_imports()
                file_content = imports_code + "\n\n" + mock_code

                self.context.file_manager.write_file(str(mock_file_path), file_content)
                generated_files.append(str(mock_file_path))

            # Generate mock endpoints __init__.py
            endpoints_init_path = mock_endpoints_dir / "__init__.py"
            endpoints_init_content = self._generate_mock_endpoints_init(tag_tuples)
            self.context.file_manager.write_file(str(endpoints_init_path), endpoints_init_content)
            generated_files.append(str(endpoints_init_path))

            # Generate main mock client
            mock_client_path = mocks_dir / "mock_client.py"
            self.context.set_current_file(str(mock_client_path))

            mock_client_code = self.client_visitor.generate_client_mock_class(spec, self.context, tag_tuples)
            imports_code = self.context.render_imports()
            file_content = imports_code + "\n\n" + mock_client_code

            self.context.file_manager.write_file(str(mock_client_path), file_content)
            generated_files.append(str(mock_client_path))

            # Generate mocks __init__.py
            mocks_init_path = mocks_dir / "__init__.py"
            mocks_init_content = self._generate_mocks_init(tag_tuples)
            self.context.file_manager.write_file(str(mocks_init_path), mocks_init_content)
            generated_files.append(str(mocks_init_path))

            return generated_files

        except Exception as e:
            with open(error_log, "a") as f:
                f.write(f"ERROR in MocksEmitter.emit: {e}\n")
                f.write(traceback.format_exc())
            raise

    def _group_operations_by_tag(self, spec: IRSpec) -> dict[str, list[IROperation]]:
        """Group operations by their OpenAPI tag."""
        operations_by_tag: dict[str, list[IROperation]] = defaultdict(list)

        for operation in spec.operations:
            tag = operation.tags[0] if operation.tags else "default"
            operations_by_tag[tag].append(operation)

        return operations_by_tag

    def _generate_mock_endpoints_init(self, tag_tuples: list[tuple[str, str, str]]) -> str:
        """Generate __init__.py for mocks/endpoints/ directory."""
        lines = []
        lines.append('"""')
        lines.append("Mock endpoint clients for testing.")
        lines.append("")
        lines.append("Import mock classes to use as base classes for your test doubles.")
        lines.append('"""')
        lines.append("")

        # Import statements
        all_exports = []
        for tag, class_name, module_name in sorted(tag_tuples, key=lambda x: x[2]):
            mock_class_name = f"Mock{class_name}"
            lines.append(f"from .mock_{module_name} import {mock_class_name}")
            all_exports.append(mock_class_name)

        lines.append("")
        lines.append("__all__ = [")
        for export in all_exports:
            lines.append(f'    "{export}",')
        lines.append("]")

        return "\n".join(lines)

    def _generate_mocks_init(self, tag_tuples: list[tuple[str, str, str]]) -> str:
        """Generate __init__.py for mocks/ directory."""
        lines = []
        lines.append('"""')
        lines.append("Mock implementations for testing.")
        lines.append("")
        lines.append("These mocks implement the Protocol contracts without requiring")
        lines.append("network transport or authentication. Use them as base classes")
        lines.append("in your tests.")
        lines.append("")
        lines.append("Example:")
        lines.append("    from myapi.mocks import MockAPIClient, MockPetsClient")
        lines.append("")
        lines.append("    class TestPetsClient(MockPetsClient):")
        lines.append("        async def list_pets(self, limit: int | None = None) -> list[Pet]:")
        lines.append("            return [Pet(id=1, name='Test Pet')]")
        lines.append("")
        lines.append("    client = MockAPIClient(pets=TestPetsClient())")
        lines.append('"""')
        lines.append("")

        # Import main mock client
        lines.append("from .mock_client import MockAPIClient")

        # Import mock endpoint classes
        all_exports = ["MockAPIClient"]
        for tag, class_name, module_name in sorted(tag_tuples, key=lambda x: x[2]):
            mock_class_name = f"Mock{class_name}"
            lines.append(f"from .endpoints.mock_{module_name} import {mock_class_name}")
            all_exports.append(mock_class_name)

        lines.append("")
        lines.append("__all__ = [")
        for export in all_exports:
            lines.append(f'    "{export}",')
        lines.append("]")

        return "\n".join(lines)
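
Note: for a single hypothetical ('pets', 'PetsClient', 'pets') tag tuple, _generate_mocks_init assembles roughly the following mocks/__init__.py; the pets tag, the Pet model, and the myapi package name in the embedded docstring example are placeholders, not taken from a real spec:

# Illustration only: mocks/__init__.py as emitted for a hypothetical "pets" tag.
"""
Mock implementations for testing.

These mocks implement the Protocol contracts without requiring
network transport or authentication. Use them as base classes
in your tests.

Example:
    from myapi.mocks import MockAPIClient, MockPetsClient

    class TestPetsClient(MockPetsClient):
        async def list_pets(self, limit: int | None = None) -> list[Pet]:
            return [Pet(id=1, name='Test Pet')]

    client = MockAPIClient(pets=TestPetsClient())
"""

from .mock_client import MockAPIClient
from .endpoints.mock_pets import MockPetsClient

__all__ = [
    "MockAPIClient",
    "MockPetsClient",
]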