pyopenapi-gen 0.8.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pyopenapi_gen/__init__.py +114 -0
- pyopenapi_gen/__main__.py +6 -0
- pyopenapi_gen/cli.py +86 -0
- pyopenapi_gen/context/file_manager.py +52 -0
- pyopenapi_gen/context/import_collector.py +382 -0
- pyopenapi_gen/context/render_context.py +630 -0
- pyopenapi_gen/core/__init__.py +0 -0
- pyopenapi_gen/core/auth/base.py +22 -0
- pyopenapi_gen/core/auth/plugins.py +89 -0
- pyopenapi_gen/core/exceptions.py +25 -0
- pyopenapi_gen/core/http_transport.py +219 -0
- pyopenapi_gen/core/loader/__init__.py +12 -0
- pyopenapi_gen/core/loader/loader.py +158 -0
- pyopenapi_gen/core/loader/operations/__init__.py +12 -0
- pyopenapi_gen/core/loader/operations/parser.py +155 -0
- pyopenapi_gen/core/loader/operations/post_processor.py +60 -0
- pyopenapi_gen/core/loader/operations/request_body.py +85 -0
- pyopenapi_gen/core/loader/parameters/__init__.py +10 -0
- pyopenapi_gen/core/loader/parameters/parser.py +121 -0
- pyopenapi_gen/core/loader/responses/__init__.py +10 -0
- pyopenapi_gen/core/loader/responses/parser.py +104 -0
- pyopenapi_gen/core/loader/schemas/__init__.py +11 -0
- pyopenapi_gen/core/loader/schemas/extractor.py +184 -0
- pyopenapi_gen/core/pagination.py +64 -0
- pyopenapi_gen/core/parsing/__init__.py +13 -0
- pyopenapi_gen/core/parsing/common/__init__.py +1 -0
- pyopenapi_gen/core/parsing/common/ref_resolution/__init__.py +9 -0
- pyopenapi_gen/core/parsing/common/ref_resolution/helpers/__init__.py +0 -0
- pyopenapi_gen/core/parsing/common/ref_resolution/helpers/cyclic_properties.py +66 -0
- pyopenapi_gen/core/parsing/common/ref_resolution/helpers/direct_cycle.py +33 -0
- pyopenapi_gen/core/parsing/common/ref_resolution/helpers/existing_schema.py +22 -0
- pyopenapi_gen/core/parsing/common/ref_resolution/helpers/list_response.py +54 -0
- pyopenapi_gen/core/parsing/common/ref_resolution/helpers/missing_ref.py +52 -0
- pyopenapi_gen/core/parsing/common/ref_resolution/helpers/new_schema.py +50 -0
- pyopenapi_gen/core/parsing/common/ref_resolution/helpers/stripped_suffix.py +51 -0
- pyopenapi_gen/core/parsing/common/ref_resolution/resolve_schema_ref.py +86 -0
- pyopenapi_gen/core/parsing/common/type_parser.py +74 -0
- pyopenapi_gen/core/parsing/context.py +184 -0
- pyopenapi_gen/core/parsing/cycle_helpers.py +123 -0
- pyopenapi_gen/core/parsing/keywords/__init__.py +1 -0
- pyopenapi_gen/core/parsing/keywords/all_of_parser.py +77 -0
- pyopenapi_gen/core/parsing/keywords/any_of_parser.py +79 -0
- pyopenapi_gen/core/parsing/keywords/array_items_parser.py +69 -0
- pyopenapi_gen/core/parsing/keywords/one_of_parser.py +72 -0
- pyopenapi_gen/core/parsing/keywords/properties_parser.py +98 -0
- pyopenapi_gen/core/parsing/schema_finalizer.py +166 -0
- pyopenapi_gen/core/parsing/schema_parser.py +610 -0
- pyopenapi_gen/core/parsing/transformers/__init__.py +0 -0
- pyopenapi_gen/core/parsing/transformers/inline_enum_extractor.py +285 -0
- pyopenapi_gen/core/parsing/transformers/inline_object_promoter.py +117 -0
- pyopenapi_gen/core/parsing/unified_cycle_detection.py +293 -0
- pyopenapi_gen/core/postprocess_manager.py +161 -0
- pyopenapi_gen/core/schemas.py +40 -0
- pyopenapi_gen/core/streaming_helpers.py +86 -0
- pyopenapi_gen/core/telemetry.py +67 -0
- pyopenapi_gen/core/utils.py +409 -0
- pyopenapi_gen/core/warning_collector.py +83 -0
- pyopenapi_gen/core/writers/code_writer.py +135 -0
- pyopenapi_gen/core/writers/documentation_writer.py +222 -0
- pyopenapi_gen/core/writers/line_writer.py +217 -0
- pyopenapi_gen/core/writers/python_construct_renderer.py +274 -0
- pyopenapi_gen/core_package_template/README.md +21 -0
- pyopenapi_gen/emit/models_emitter.py +143 -0
- pyopenapi_gen/emitters/client_emitter.py +51 -0
- pyopenapi_gen/emitters/core_emitter.py +181 -0
- pyopenapi_gen/emitters/docs_emitter.py +44 -0
- pyopenapi_gen/emitters/endpoints_emitter.py +223 -0
- pyopenapi_gen/emitters/exceptions_emitter.py +52 -0
- pyopenapi_gen/emitters/models_emitter.py +428 -0
- pyopenapi_gen/generator/client_generator.py +562 -0
- pyopenapi_gen/helpers/__init__.py +1 -0
- pyopenapi_gen/helpers/endpoint_utils.py +552 -0
- pyopenapi_gen/helpers/type_cleaner.py +341 -0
- pyopenapi_gen/helpers/type_helper.py +112 -0
- pyopenapi_gen/helpers/type_resolution/__init__.py +1 -0
- pyopenapi_gen/helpers/type_resolution/array_resolver.py +57 -0
- pyopenapi_gen/helpers/type_resolution/composition_resolver.py +79 -0
- pyopenapi_gen/helpers/type_resolution/finalizer.py +89 -0
- pyopenapi_gen/helpers/type_resolution/named_resolver.py +174 -0
- pyopenapi_gen/helpers/type_resolution/object_resolver.py +212 -0
- pyopenapi_gen/helpers/type_resolution/primitive_resolver.py +57 -0
- pyopenapi_gen/helpers/type_resolution/resolver.py +48 -0
- pyopenapi_gen/helpers/url_utils.py +14 -0
- pyopenapi_gen/http_types.py +20 -0
- pyopenapi_gen/ir.py +167 -0
- pyopenapi_gen/py.typed +1 -0
- pyopenapi_gen/types/__init__.py +11 -0
- pyopenapi_gen/types/contracts/__init__.py +13 -0
- pyopenapi_gen/types/contracts/protocols.py +106 -0
- pyopenapi_gen/types/contracts/types.py +30 -0
- pyopenapi_gen/types/resolvers/__init__.py +7 -0
- pyopenapi_gen/types/resolvers/reference_resolver.py +71 -0
- pyopenapi_gen/types/resolvers/response_resolver.py +203 -0
- pyopenapi_gen/types/resolvers/schema_resolver.py +367 -0
- pyopenapi_gen/types/services/__init__.py +5 -0
- pyopenapi_gen/types/services/type_service.py +133 -0
- pyopenapi_gen/visit/client_visitor.py +228 -0
- pyopenapi_gen/visit/docs_visitor.py +38 -0
- pyopenapi_gen/visit/endpoint/__init__.py +1 -0
- pyopenapi_gen/visit/endpoint/endpoint_visitor.py +103 -0
- pyopenapi_gen/visit/endpoint/generators/__init__.py +1 -0
- pyopenapi_gen/visit/endpoint/generators/docstring_generator.py +121 -0
- pyopenapi_gen/visit/endpoint/generators/endpoint_method_generator.py +87 -0
- pyopenapi_gen/visit/endpoint/generators/request_generator.py +103 -0
- pyopenapi_gen/visit/endpoint/generators/response_handler_generator.py +497 -0
- pyopenapi_gen/visit/endpoint/generators/signature_generator.py +88 -0
- pyopenapi_gen/visit/endpoint/generators/url_args_generator.py +183 -0
- pyopenapi_gen/visit/endpoint/processors/__init__.py +1 -0
- pyopenapi_gen/visit/endpoint/processors/import_analyzer.py +76 -0
- pyopenapi_gen/visit/endpoint/processors/parameter_processor.py +171 -0
- pyopenapi_gen/visit/exception_visitor.py +52 -0
- pyopenapi_gen/visit/model/__init__.py +0 -0
- pyopenapi_gen/visit/model/alias_generator.py +89 -0
- pyopenapi_gen/visit/model/dataclass_generator.py +197 -0
- pyopenapi_gen/visit/model/enum_generator.py +200 -0
- pyopenapi_gen/visit/model/model_visitor.py +197 -0
- pyopenapi_gen/visit/visitor.py +97 -0
- pyopenapi_gen-0.8.3.dist-info/METADATA +224 -0
- pyopenapi_gen-0.8.3.dist-info/RECORD +122 -0
- pyopenapi_gen-0.8.3.dist-info/WHEEL +4 -0
- pyopenapi_gen-0.8.3.dist-info/entry_points.txt +2 -0
- pyopenapi_gen-0.8.3.dist-info/licenses/LICENSE +21 -0
@@ -0,0 +1,562 @@
|
|
1
|
+
"""
|
2
|
+
ClientGenerator: Encapsulates the OpenAPI client generation logic for use by CLI or other frontends.
|
3
|
+
"""
|
4
|
+
|
5
|
+
import logging
|
6
|
+
import os
|
7
|
+
import shutil
|
8
|
+
import tempfile
|
9
|
+
import time
|
10
|
+
from datetime import datetime
|
11
|
+
from pathlib import Path
|
12
|
+
from typing import Any, Dict, List, Optional
|
13
|
+
|
14
|
+
from pyopenapi_gen.context.render_context import RenderContext
|
15
|
+
from pyopenapi_gen.core.loader.loader import load_ir_from_spec
|
16
|
+
from pyopenapi_gen.core.postprocess_manager import PostprocessManager
|
17
|
+
from pyopenapi_gen.core.warning_collector import WarningCollector
|
18
|
+
from pyopenapi_gen.emitters.client_emitter import ClientEmitter
|
19
|
+
from pyopenapi_gen.emitters.core_emitter import CoreEmitter
|
20
|
+
from pyopenapi_gen.emitters.endpoints_emitter import EndpointsEmitter
|
21
|
+
from pyopenapi_gen.emitters.exceptions_emitter import ExceptionsEmitter
|
22
|
+
from pyopenapi_gen.emitters.models_emitter import ModelsEmitter
|
23
|
+
|
24
|
+
logger = logging.getLogger(__name__)
|
25
|
+
|
26
|
+
|
27
|
+
class GenerationError(Exception):
|
28
|
+
"""Raised when client generation fails due to errors or diffs."""
|
29
|
+
|
30
|
+
pass
|
31
|
+
|
32
|
+
|
33
|
+
class ClientGenerator:
|
34
|
+
"""
|
35
|
+
Generates a Python OpenAPI client package from a given OpenAPI spec file or URL.
|
36
|
+
|
37
|
+
This class encapsulates all logic for code generation, diffing, post-processing, and output management.
|
38
|
+
It is independent of any CLI or UI framework and can be used programmatically.
|
39
|
+
"""
|
40
|
+
|
41
|
+
def __init__(self, verbose: bool = True) -> None:
|
42
|
+
"""
|
43
|
+
Initialize the client generator.
|
44
|
+
|
45
|
+
Args:
|
46
|
+
verbose: Whether to output detailed progress information.
|
47
|
+
"""
|
48
|
+
self.verbose = verbose
|
49
|
+
self.start_time = time.time()
|
50
|
+
self.timings: Dict[str, float] = {}
|
51
|
+
|
52
|
+
def _log_progress(self, message: str, stage: Optional[str] = None) -> None:
|
53
|
+
"""
|
54
|
+
Log a progress message with timestamp.
|
55
|
+
|
56
|
+
Args:
|
57
|
+
message: The progress message to log.
|
58
|
+
stage: Optional name of the current stage for timing information.
|
59
|
+
"""
|
60
|
+
if not self.verbose:
|
61
|
+
return
|
62
|
+
|
63
|
+
elapsed = time.time() - self.start_time
|
64
|
+
timestamp = datetime.now().strftime("%H:%M:%S")
|
65
|
+
|
66
|
+
if stage:
|
67
|
+
# Mark stage start
|
68
|
+
if stage not in self.timings:
|
69
|
+
self.timings[stage] = time.time()
|
70
|
+
stage_msg = f"[STARTING {stage}]"
|
71
|
+
else:
|
72
|
+
# Mark stage end
|
73
|
+
stage_time = time.time() - self.timings[stage]
|
74
|
+
stage_msg = f"[COMPLETED {stage} in {stage_time:.2f}s]"
|
75
|
+
|
76
|
+
log_msg = f"{timestamp} ({elapsed:.2f}s) {stage_msg} {message}"
|
77
|
+
else:
|
78
|
+
log_msg = f"{timestamp} ({elapsed:.2f}s) {message}"
|
79
|
+
|
80
|
+
# logger.info(log_msg) # Keep commented out to ensure test_gen_nonexistent_spec_path passes
|
81
|
+
# Also print to stdout for CLI users
|
82
|
+
# print(log_msg) # Keep commented out
|
83
|
+
|
84
|
+
def generate(
|
85
|
+
self,
|
86
|
+
spec_path: str,
|
87
|
+
project_root: Path,
|
88
|
+
output_package: str,
|
89
|
+
force: bool = False,
|
90
|
+
no_postprocess: bool = False,
|
91
|
+
core_package: Optional[str] = None,
|
92
|
+
) -> List[Path]:
|
93
|
+
"""
|
94
|
+
Generate the client code from the OpenAPI spec.
|
95
|
+
|
96
|
+
Args:
|
97
|
+
spec_path (str): Path or URL to the OpenAPI spec file.
|
98
|
+
project_root (Path): Path to the root of the Python project (absolute or relative).
|
99
|
+
output_package (str): Python package path for the generated client (e.g., 'pyapis.my_api_client').
|
100
|
+
force (bool): Overwrite output without diff check.
|
101
|
+
name (Optional[str]): Custom client package name (not used).
|
102
|
+
docs (bool): Kept for interface compatibility.
|
103
|
+
telemetry (bool): Kept for interface compatibility.
|
104
|
+
auth (Optional[str]): Kept for interface compatibility.
|
105
|
+
no_postprocess (bool): Skip post-processing (type checking, etc.).
|
106
|
+
core_package (str): Python package path for the core package.
|
107
|
+
|
108
|
+
Raises:
|
109
|
+
GenerationError: If generation fails or diffs are found (when not forcing overwrite).
|
110
|
+
"""
|
111
|
+
self._log_progress(f"Starting code generation for specification: {spec_path}", "GENERATION")
|
112
|
+
project_root = Path(project_root).resolve()
|
113
|
+
|
114
|
+
# Stage 1: Load Spec
|
115
|
+
self._log_progress(f"Loading specification from {spec_path}", "LOAD_SPEC")
|
116
|
+
spec_dict = self._load_spec(spec_path)
|
117
|
+
self._log_progress(f"Loaded specification with {len(spec_dict)} top-level keys", "LOAD_SPEC")
|
118
|
+
|
119
|
+
# Stage 2: Parse to IR
|
120
|
+
self._log_progress(f"Parsing specification into intermediate representation", "PARSE_IR")
|
121
|
+
ir = load_ir_from_spec(spec_dict)
|
122
|
+
|
123
|
+
# Log stats about the IR
|
124
|
+
schema_count = len(ir.schemas) if ir.schemas else 0
|
125
|
+
operation_count = len(ir.operations) if ir.operations else 0
|
126
|
+
self._log_progress(f"Parsed IR with {schema_count} schemas and {operation_count} operations", "PARSE_IR")
|
127
|
+
|
128
|
+
# Stage 3: Collect warnings
|
129
|
+
self._log_progress("Collecting warnings", "WARNINGS")
|
130
|
+
collector = WarningCollector()
|
131
|
+
reports = collector.collect(ir)
|
132
|
+
for report in reports:
|
133
|
+
warning_msg = f"WARNING [{report.code}]: {report.message} (Hint: {report.hint})"
|
134
|
+
# print(warning_msg) # Changed to logger.warning
|
135
|
+
logger.warning(warning_msg)
|
136
|
+
self._log_progress(f"Found {len(reports)} warnings", "WARNINGS")
|
137
|
+
|
138
|
+
# Resolve output and core directories from package paths
|
139
|
+
def pkg_to_path(pkg: str) -> Path:
|
140
|
+
return project_root.joinpath(*pkg.split("."))
|
141
|
+
|
142
|
+
# Default output_package if not set
|
143
|
+
if not output_package:
|
144
|
+
raise ValueError("Output package name cannot be empty")
|
145
|
+
out_dir = pkg_to_path(output_package)
|
146
|
+
|
147
|
+
# --- Robust Defaulting for core_package ---
|
148
|
+
if core_package is None: # User did not specify, use default relative to output_package
|
149
|
+
resolved_core_package_fqn = output_package + ".core"
|
150
|
+
else: # User specified something, use it as is
|
151
|
+
resolved_core_package_fqn = core_package
|
152
|
+
# --- End Robust Defaulting ---
|
153
|
+
|
154
|
+
# Determine core_dir (physical path for CoreEmitter)
|
155
|
+
core_dir = pkg_to_path(resolved_core_package_fqn)
|
156
|
+
|
157
|
+
# The actual_core_module_name_for_emitter_init becomes resolved_core_package_fqn
|
158
|
+
# The core_import_path_for_context also becomes resolved_core_package_fqn
|
159
|
+
|
160
|
+
self._log_progress(f"Output directory: {out_dir}", "CONFIG")
|
161
|
+
self._log_progress(f"Core package: {resolved_core_package_fqn}", "CONFIG")
|
162
|
+
|
163
|
+
generated_files = []
|
164
|
+
|
165
|
+
# Create RenderContext once and populate its parsed_schemas for the force=True path
|
166
|
+
# It will be used if not doing a diff, or after a successful diff.
|
167
|
+
self._log_progress("Creating render context", "INIT")
|
168
|
+
main_render_context = RenderContext(
|
169
|
+
core_package_name=resolved_core_package_fqn,
|
170
|
+
package_root_for_generated_code=str(out_dir),
|
171
|
+
overall_project_root=str(project_root),
|
172
|
+
parsed_schemas=ir.schemas,
|
173
|
+
output_package_name=output_package,
|
174
|
+
)
|
175
|
+
|
176
|
+
if not force and out_dir.exists():
|
177
|
+
self._log_progress("Checking for differences with existing output", "DIFF_CHECK")
|
178
|
+
# --- Refactored Diff Logic ---
|
179
|
+
with tempfile.TemporaryDirectory() as tmpdir:
|
180
|
+
tmp_project_root_for_diff = Path(tmpdir)
|
181
|
+
|
182
|
+
# Define temporary destination paths based on the temp project root
|
183
|
+
def tmp_pkg_to_path(pkg: str) -> Path:
|
184
|
+
# Ensure the path is relative to the temp root, not the final project root
|
185
|
+
return tmp_project_root_for_diff.joinpath(*pkg.split("."))
|
186
|
+
|
187
|
+
tmp_out_dir_for_diff = tmp_pkg_to_path(output_package)
|
188
|
+
tmp_core_dir_for_diff = tmp_pkg_to_path(resolved_core_package_fqn)
|
189
|
+
|
190
|
+
# Ensure temporary directories exist (FileManager used by emitters might handle this,
|
191
|
+
# but explicit is safer)
|
192
|
+
tmp_out_dir_for_diff.mkdir(parents=True, exist_ok=True)
|
193
|
+
tmp_core_dir_for_diff.mkdir(parents=True, exist_ok=True) # Ensure core temp dir always exists
|
194
|
+
|
195
|
+
# --- Generate files into the temporary structure ---
|
196
|
+
temp_generated_files = [] # Track files generated in temp dir
|
197
|
+
|
198
|
+
# 1. ExceptionsEmitter (emits exception_aliases.py to tmp_core_dir_for_diff)
|
199
|
+
self._log_progress("Generating exception files (temp)", "EMIT_EXCEPTIONS_TEMP")
|
200
|
+
exceptions_emitter = ExceptionsEmitter(
|
201
|
+
core_package_name=resolved_core_package_fqn,
|
202
|
+
overall_project_root=str(tmp_project_root_for_diff), # Use temp project root for context
|
203
|
+
)
|
204
|
+
exception_files_list, exception_alias_names = exceptions_emitter.emit(
|
205
|
+
ir, str(tmp_core_dir_for_diff)
|
206
|
+
) # Emit TO temp core dir
|
207
|
+
exception_files = [Path(p) for p in exception_files_list]
|
208
|
+
temp_generated_files += exception_files
|
209
|
+
self._log_progress(f"Generated {len(exception_files)} exception files (temp)", "EMIT_EXCEPTIONS_TEMP")
|
210
|
+
|
211
|
+
# 2. CoreEmitter (emits core files to tmp_core_dir_for_diff)
|
212
|
+
self._log_progress("Generating core files (temp)", "EMIT_CORE_TEMP")
|
213
|
+
# Note: CoreEmitter copies files, RenderContext isn't strictly needed for it, but path must be correct.
|
214
|
+
relative_core_path_for_emitter_init_temp = os.path.relpath(tmp_core_dir_for_diff, tmp_out_dir_for_diff)
|
215
|
+
core_emitter = CoreEmitter(
|
216
|
+
core_dir=str(relative_core_path_for_emitter_init_temp),
|
217
|
+
core_package=resolved_core_package_fqn,
|
218
|
+
exception_alias_names=exception_alias_names,
|
219
|
+
)
|
220
|
+
core_files = [Path(p) for p in core_emitter.emit(str(tmp_out_dir_for_diff))]
|
221
|
+
temp_generated_files += core_files
|
222
|
+
self._log_progress(f"Generated {len(core_files)} core files (temp)", "EMIT_CORE_TEMP")
|
223
|
+
|
224
|
+
# 3. config.py (write to tmp_core_dir_for_diff using FileManager) - REMOVED, CoreEmitter handles this
|
225
|
+
# fm = FileManager()
|
226
|
+
# config_dst_temp = tmp_core_dir_for_diff / "config.py"
|
227
|
+
# config_content = CONFIG_TEMPLATE
|
228
|
+
# fm.write_file(str(config_dst_temp), config_content)
|
229
|
+
# temp_generated_files.append(config_dst_temp)
|
230
|
+
|
231
|
+
# 4. ModelsEmitter (emits models to tmp_out_dir_for_diff/models)
|
232
|
+
self._log_progress("Generating model files (temp)", "EMIT_MODELS_TEMP")
|
233
|
+
# Create a temporary RenderContext for the diff path
|
234
|
+
tmp_render_context_for_diff = RenderContext(
|
235
|
+
core_package_name=resolved_core_package_fqn,
|
236
|
+
package_root_for_generated_code=str(tmp_out_dir_for_diff),
|
237
|
+
overall_project_root=str(tmp_project_root_for_diff),
|
238
|
+
parsed_schemas=ir.schemas,
|
239
|
+
output_package_name=output_package,
|
240
|
+
)
|
241
|
+
models_emitter = ModelsEmitter(context=tmp_render_context_for_diff, parsed_schemas=ir.schemas)
|
242
|
+
model_files_dict = models_emitter.emit(
|
243
|
+
ir, str(tmp_out_dir_for_diff)
|
244
|
+
) # ModelsEmitter.emit now takes IRSpec
|
245
|
+
temp_generated_files += [
|
246
|
+
Path(p) for p_list in model_files_dict.values() for p in p_list
|
247
|
+
] # Flatten list of lists
|
248
|
+
schema_count = len(ir.schemas) if ir.schemas else 0
|
249
|
+
self._log_progress(
|
250
|
+
f"Generated {len(model_files_dict)} model files for {schema_count} schemas (temp)",
|
251
|
+
"EMIT_MODELS_TEMP",
|
252
|
+
)
|
253
|
+
|
254
|
+
# 5. EndpointsEmitter (emits endpoints to tmp_out_dir_for_diff/endpoints)
|
255
|
+
self._log_progress("Generating endpoint files (temp)", "EMIT_ENDPOINTS_TEMP")
|
256
|
+
endpoints_emitter = EndpointsEmitter(context=tmp_render_context_for_diff)
|
257
|
+
endpoint_files = [
|
258
|
+
Path(p)
|
259
|
+
for p in endpoints_emitter.emit(
|
260
|
+
ir.operations, str(tmp_out_dir_for_diff)
|
261
|
+
) # emit takes ir.operations, str output_dir
|
262
|
+
]
|
263
|
+
temp_generated_files += endpoint_files
|
264
|
+
operation_count = len(ir.operations) if ir.operations else 0
|
265
|
+
self._log_progress(
|
266
|
+
f"Generated {len(endpoint_files)} endpoint files for {operation_count} operations (temp)",
|
267
|
+
"EMIT_ENDPOINTS_TEMP",
|
268
|
+
)
|
269
|
+
|
270
|
+
# 6. ClientEmitter (emits client.py to tmp_out_dir_for_diff)
|
271
|
+
self._log_progress("Generating client file (temp)", "EMIT_CLIENT_TEMP")
|
272
|
+
client_emitter = ClientEmitter(context=tmp_render_context_for_diff) # ClientEmitter now takes context
|
273
|
+
client_files = [
|
274
|
+
Path(p) for p in client_emitter.emit(ir, str(tmp_out_dir_for_diff)) # emit takes ir, str output_dir
|
275
|
+
]
|
276
|
+
temp_generated_files += client_files
|
277
|
+
self._log_progress(f"Generated {len(client_files)} client files (temp)", "EMIT_CLIENT_TEMP")
|
278
|
+
|
279
|
+
# Post-processing should run on the temporary files if enabled
|
280
|
+
if not no_postprocess:
|
281
|
+
self._log_progress("Running post-processing on temporary files", "POSTPROCESS_TEMP")
|
282
|
+
# Pass the temp project root to PostprocessManager
|
283
|
+
PostprocessManager(str(tmp_project_root_for_diff)).run([str(p) for p in temp_generated_files])
|
284
|
+
self._log_progress(f"Post-processed {len(temp_generated_files)} files", "POSTPROCESS_TEMP")
|
285
|
+
|
286
|
+
# --- Compare final output dirs with the temp output dirs ---
|
287
|
+
self._log_progress("Comparing generated files with existing files", "DIFF")
|
288
|
+
# Compare client package dir
|
289
|
+
self._log_progress(f"Checking client package differences", "DIFF_CLIENT")
|
290
|
+
has_diff_client = self._show_diffs(str(out_dir), str(tmp_out_dir_for_diff))
|
291
|
+
|
292
|
+
# Compare core package dir IF it's different from the client dir
|
293
|
+
has_diff_core = False
|
294
|
+
if core_dir != out_dir:
|
295
|
+
self._log_progress(f"Checking core package differences", "DIFF_CORE")
|
296
|
+
has_diff_core = self._show_diffs(str(core_dir), str(tmp_core_dir_for_diff))
|
297
|
+
|
298
|
+
if has_diff_client or has_diff_core:
|
299
|
+
self._log_progress("Differences found, not updating existing output", "DIFF_RESULT")
|
300
|
+
raise GenerationError("Differences found between generated and existing output.")
|
301
|
+
|
302
|
+
self._log_progress("No differences found, using existing files", "DIFF_RESULT")
|
303
|
+
# If no diffs, return the paths of the *existing* files (no changes made)
|
304
|
+
# We need to collect the actual existing file paths corresponding to temp_generated_files
|
305
|
+
# This is tricky because _show_diffs only returns bool.
|
306
|
+
# A simpler approach if no diff: do nothing, return empty list? Or paths of existing files?
|
307
|
+
# Let's return the existing paths for consistency with the `else` block.
|
308
|
+
# Need to map temp_generated_files back to original project_root based paths.
|
309
|
+
final_generated_files = []
|
310
|
+
for tmp_file in temp_generated_files:
|
311
|
+
try:
|
312
|
+
# Find relative path from temp root
|
313
|
+
rel_path = tmp_file.relative_to(tmp_project_root_for_diff)
|
314
|
+
# Construct path relative to final project root
|
315
|
+
final_path = project_root / rel_path
|
316
|
+
if final_path.exists(): # Should exist if no diff
|
317
|
+
final_generated_files.append(final_path)
|
318
|
+
except ValueError:
|
319
|
+
# Should not happen if paths are constructed correctly
|
320
|
+
print(f"Warning: Could not map temporary file {tmp_file} back to project root {project_root}")
|
321
|
+
generated_files = final_generated_files
|
322
|
+
self._log_progress(f"Mapped {len(generated_files)} existing files", "DIFF_COMPLETE")
|
323
|
+
|
324
|
+
# --- End Refactored Diff Logic ---
|
325
|
+
else: # This is the force=True or first-run logic
|
326
|
+
self._log_progress("Direct generation (force=True or first run)", "DIRECT_GEN")
|
327
|
+
if out_dir.exists():
|
328
|
+
self._log_progress(f"Removing existing directory: {out_dir}", "CLEANUP")
|
329
|
+
shutil.rmtree(str(out_dir))
|
330
|
+
# Ensure parent dirs exist before creating final output dir
|
331
|
+
self._log_progress(f"Creating directory structure", "SETUP_DIRS")
|
332
|
+
out_dir.parent.mkdir(parents=True, exist_ok=True)
|
333
|
+
out_dir.mkdir(parents=True, exist_ok=True) # Create final output dir
|
334
|
+
|
335
|
+
# Ensure core dir exists if different from out_dir
|
336
|
+
if core_dir != out_dir:
|
337
|
+
core_dir.parent.mkdir(parents=True, exist_ok=True)
|
338
|
+
core_dir.mkdir(parents=True, exist_ok=True) # Create final core dir
|
339
|
+
|
340
|
+
# Write root __init__.py if needed (handle nested packages like a.b.c)
|
341
|
+
self._log_progress("Creating __init__.py files for package structure", "INIT_FILES")
|
342
|
+
init_files_created = 0
|
343
|
+
current = out_dir
|
344
|
+
while current != project_root:
|
345
|
+
init_path = current / "__init__.py"
|
346
|
+
if not init_path.exists():
|
347
|
+
init_path.write_text("")
|
348
|
+
init_files_created += 1
|
349
|
+
if current.parent == current: # Avoid infinite loop at root
|
350
|
+
break
|
351
|
+
current = current.parent
|
352
|
+
|
353
|
+
# If core_dir is outside out_dir structure, ensure its __init__.py exist too
|
354
|
+
if not str(core_dir).startswith(str(out_dir)):
|
355
|
+
current = core_dir
|
356
|
+
while current != project_root:
|
357
|
+
init_path = current / "__init__.py"
|
358
|
+
if not init_path.exists():
|
359
|
+
init_path.write_text("")
|
360
|
+
init_files_created += 1
|
361
|
+
if current.parent == current:
|
362
|
+
break
|
363
|
+
current = current.parent
|
364
|
+
|
365
|
+
self._log_progress(f"Created {init_files_created} __init__.py files", "INIT_FILES")
|
366
|
+
|
367
|
+
# --- Generate directly into final destination paths ---
|
368
|
+
self._log_progress("Starting direct file generation", "DIRECT_GEN_FILES")
|
369
|
+
|
370
|
+
# 1. ExceptionsEmitter
|
371
|
+
self._log_progress("Generating exception files", "EMIT_EXCEPTIONS")
|
372
|
+
exceptions_emitter = ExceptionsEmitter(
|
373
|
+
core_package_name=resolved_core_package_fqn,
|
374
|
+
overall_project_root=str(project_root),
|
375
|
+
)
|
376
|
+
exception_files_list, exception_alias_names = exceptions_emitter.emit(ir, str(core_dir))
|
377
|
+
generated_files += [Path(p) for p in exception_files_list]
|
378
|
+
self._log_progress(f"Generated {len(exception_files_list)} exception files", "EMIT_EXCEPTIONS")
|
379
|
+
|
380
|
+
# 2. CoreEmitter
|
381
|
+
self._log_progress("Generating core files", "EMIT_CORE")
|
382
|
+
relative_core_path_for_emitter_init = os.path.relpath(core_dir, out_dir)
|
383
|
+
core_emitter = CoreEmitter(
|
384
|
+
core_dir=str(relative_core_path_for_emitter_init),
|
385
|
+
core_package=resolved_core_package_fqn,
|
386
|
+
exception_alias_names=exception_alias_names,
|
387
|
+
)
|
388
|
+
generated_files += [Path(p) for p in core_emitter.emit(str(out_dir))]
|
389
|
+
self._log_progress(f"Generated {len(core_emitter.emit(str(out_dir)))} core files", "EMIT_CORE")
|
390
|
+
|
391
|
+
# 3. config.py (using FileManager) - REMOVED, CoreEmitter handles this
|
392
|
+
# fm = FileManager()
|
393
|
+
# config_dst = core_dir / "config.py"
|
394
|
+
# config_content = CONFIG_TEMPLATE
|
395
|
+
# fm.write_file(str(config_dst), config_content) # Use FileManager
|
396
|
+
# generated_files.append(config_dst)
|
397
|
+
|
398
|
+
# 4. ModelsEmitter
|
399
|
+
self._log_progress("Generating model files", "EMIT_MODELS")
|
400
|
+
models_emitter = ModelsEmitter(context=main_render_context, parsed_schemas=ir.schemas)
|
401
|
+
model_files_dict = models_emitter.emit(ir, str(out_dir)) # ModelsEmitter.emit now takes IRSpec
|
402
|
+
generated_files += [
|
403
|
+
Path(p) for p_list in model_files_dict.values() for p in p_list
|
404
|
+
] # Flatten list of lists
|
405
|
+
schema_count = len(ir.schemas) if ir.schemas else 0
|
406
|
+
self._log_progress(
|
407
|
+
f"Generated {len(model_files_dict)} model files for {schema_count} schemas",
|
408
|
+
"EMIT_MODELS",
|
409
|
+
)
|
410
|
+
|
411
|
+
# 5. EndpointsEmitter
|
412
|
+
self._log_progress("Generating endpoint files", "EMIT_ENDPOINTS")
|
413
|
+
endpoints_emitter = EndpointsEmitter(context=main_render_context)
|
414
|
+
generated_files += [
|
415
|
+
Path(p) for p in endpoints_emitter.emit(ir.operations, str(out_dir))
|
416
|
+
] # emit takes ir.operations, str output_dir
|
417
|
+
operation_count = len(ir.operations) if ir.operations else 0
|
418
|
+
self._log_progress(
|
419
|
+
f"Generated {len(endpoints_emitter.emit(ir.operations, str(out_dir)))} "
|
420
|
+
f"endpoint files for {operation_count} operations",
|
421
|
+
"EMIT_ENDPOINTS",
|
422
|
+
)
|
423
|
+
|
424
|
+
# 6. ClientEmitter
|
425
|
+
self._log_progress("Generating client file", "EMIT_CLIENT")
|
426
|
+
client_emitter = ClientEmitter(context=main_render_context) # ClientEmitter now takes context
|
427
|
+
client_files = [Path(p) for p in client_emitter.emit(ir, str(out_dir))] # emit takes ir, str output_dir
|
428
|
+
generated_files += client_files
|
429
|
+
self._log_progress(f"Generated {len(client_files)} client files", "EMIT_CLIENT")
|
430
|
+
|
431
|
+
# After all emitters, if core_package is specified (external core),
|
432
|
+
# create a rich __init__.py in the client's output_package (out_dir).
|
433
|
+
if core_package: # core_package is the user-provided original arg
|
434
|
+
client_init_py_path = out_dir / "__init__.py"
|
435
|
+
self._log_progress(
|
436
|
+
f"Generating rich __init__.py for client package at {client_init_py_path}", "CLIENT_INIT"
|
437
|
+
)
|
438
|
+
|
439
|
+
# Core components to re-export.
|
440
|
+
# resolved_core_package_fqn is the correct fully qualified name to use for imports.
|
441
|
+
core_imports = [
|
442
|
+
f"from {resolved_core_package_fqn}.auth import BaseAuth, ApiKeyAuth, BearerAuth, OAuth2Auth",
|
443
|
+
f"from {resolved_core_package_fqn}.config import ClientConfig",
|
444
|
+
f"from {resolved_core_package_fqn}.exceptions import HTTPError, ClientError, ServerError",
|
445
|
+
f"from {resolved_core_package_fqn}.exception_aliases import * # noqa: F401, F403",
|
446
|
+
f"from {resolved_core_package_fqn}.http_transport import HttpTransport, HttpxTransport",
|
447
|
+
f"from {resolved_core_package_fqn}.schemas import BaseSchema",
|
448
|
+
]
|
449
|
+
|
450
|
+
client_imports = [
|
451
|
+
"from .client import APIClient",
|
452
|
+
]
|
453
|
+
|
454
|
+
all_list = [
|
455
|
+
'"APIClient",',
|
456
|
+
'"BaseAuth", "ApiKeyAuth", "BearerAuth", "OAuth2Auth",',
|
457
|
+
'"ClientConfig",',
|
458
|
+
'"HTTPError", "ClientError", "ServerError",',
|
459
|
+
# Names from exception_aliases are available via star import
|
460
|
+
'"HttpTransport", "HttpxTransport",',
|
461
|
+
'"BaseSchema",',
|
462
|
+
]
|
463
|
+
|
464
|
+
init_content_lines = [
|
465
|
+
"# Client package __init__.py",
|
466
|
+
"# Re-exports from core and local client.",
|
467
|
+
"",
|
468
|
+
]
|
469
|
+
init_content_lines.extend(core_imports)
|
470
|
+
init_content_lines.extend(client_imports)
|
471
|
+
init_content_lines.append("")
|
472
|
+
init_content_lines.append("__all__ = [")
|
473
|
+
for item in all_list:
|
474
|
+
init_content_lines.append(f" {item}")
|
475
|
+
init_content_lines.append("]")
|
476
|
+
init_content_lines.append("") # Trailing newline
|
477
|
+
|
478
|
+
# Use FileManager from the main_render_context if available, or create one.
|
479
|
+
# For simplicity here, just write directly.
|
480
|
+
try:
|
481
|
+
with open(client_init_py_path, "w") as f:
|
482
|
+
f.write("\\n".join(init_content_lines))
|
483
|
+
generated_files.append(client_init_py_path) # Track this generated file
|
484
|
+
self._log_progress(f"Successfully wrote rich __init__.py to {client_init_py_path}", "CLIENT_INIT")
|
485
|
+
except IOError as e:
|
486
|
+
self._log_progress(f"ERROR: Failed to write client __init__.py: {e}", "CLIENT_INIT")
|
487
|
+
# Optionally re-raise or handle as a generation failure
|
488
|
+
raise GenerationError(f"Failed to write client __init__.py: {e}") from e
|
489
|
+
|
490
|
+
# Post-processing applies to all generated files
|
491
|
+
if not no_postprocess:
|
492
|
+
self._log_progress("Running post-processing on generated files", "POSTPROCESS")
|
493
|
+
PostprocessManager(str(project_root)).run([str(p) for p in generated_files])
|
494
|
+
self._log_progress(f"Post-processed {len(generated_files)} files", "POSTPROCESS")
|
495
|
+
|
496
|
+
total_time = time.time() - self.start_time
|
497
|
+
self._log_progress(
|
498
|
+
f"Code generation completed successfully in {total_time:.2f}s, generated {len(generated_files)} files",
|
499
|
+
"GENERATION",
|
500
|
+
)
|
501
|
+
|
502
|
+
# Print timing summary if verbose
|
503
|
+
if self.verbose:
|
504
|
+
self._log_progress("=== Generation Summary ===", None)
|
505
|
+
for stage, start_time in sorted(self.timings.items()):
|
506
|
+
# Only include stages that have both start and end times
|
507
|
+
if f"{stage}_COMPLETE" in self.timings or stage in self.timings:
|
508
|
+
end_time = self.timings.get(f"{stage}_COMPLETE", time.time())
|
509
|
+
duration = end_time - start_time
|
510
|
+
self._log_progress(f"{stage}: {duration:.2f}s", None)
|
511
|
+
|
512
|
+
return generated_files
|
513
|
+
|
514
|
+
def _load_spec(self, path_or_url: str) -> dict[str, Any]:
|
515
|
+
"""
|
516
|
+
Load a spec from a file path or URL.
|
517
|
+
Args:
|
518
|
+
path_or_url (str): Path or URL to the OpenAPI spec.
|
519
|
+
Returns:
|
520
|
+
dict[str, Any]: Parsed OpenAPI spec.
|
521
|
+
Raises:
|
522
|
+
GenerationError: If loading fails or URL loading is not implemented.
|
523
|
+
"""
|
524
|
+
spec_path_obj = Path(path_or_url)
|
525
|
+
if spec_path_obj.exists() and spec_path_obj.is_file(): # Added is_file() check
|
526
|
+
import yaml
|
527
|
+
|
528
|
+
data = yaml.safe_load(spec_path_obj.read_text())
|
529
|
+
if not isinstance(data, dict):
|
530
|
+
raise GenerationError("Loaded spec is not a dictionary.")
|
531
|
+
return data
|
532
|
+
elif not spec_path_obj.exists():
|
533
|
+
raise GenerationError(f"Specification file not found at {path_or_url}")
|
534
|
+
elif not spec_path_obj.is_file():
|
535
|
+
raise GenerationError(f"Specified path {path_or_url} is not a file.")
|
536
|
+
else: # Fallback, should ideally not be reached with current checks
|
537
|
+
raise GenerationError(
|
538
|
+
f"Failed to load spec from {path_or_url}. URL loading not implemented or path is invalid."
|
539
|
+
)
|
540
|
+
|
541
|
+
def _show_diffs(self, old_dir: str, new_dir: str) -> bool:
|
542
|
+
"""
|
543
|
+
Compare two directories and print diffs, returning True if any differences.
|
544
|
+
Args:
|
545
|
+
old_dir (str): Path to the old directory.
|
546
|
+
new_dir (str): Path to the new directory.
|
547
|
+
Returns:
|
548
|
+
bool: True if differences are found, False otherwise.
|
549
|
+
"""
|
550
|
+
import difflib
|
551
|
+
|
552
|
+
has_diff = False
|
553
|
+
for new_file in Path(new_dir).rglob("*.py"):
|
554
|
+
old_file = Path(old_dir) / new_file.relative_to(new_dir)
|
555
|
+
if old_file.exists():
|
556
|
+
old_lines = old_file.read_text().splitlines()
|
557
|
+
new_lines = new_file.read_text().splitlines()
|
558
|
+
diff = list(difflib.unified_diff(old_lines, new_lines, fromfile=str(old_file), tofile=str(new_file)))
|
559
|
+
if diff:
|
560
|
+
has_diff = True
|
561
|
+
print("\n".join(diff))
|
562
|
+
return has_diff
|
@@ -0,0 +1 @@
|
|
1
|
+
# This file makes 'helpers' a package.
|