pyopenapi-gen 0.8.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (122) hide show
  1. pyopenapi_gen/__init__.py +114 -0
  2. pyopenapi_gen/__main__.py +6 -0
  3. pyopenapi_gen/cli.py +86 -0
  4. pyopenapi_gen/context/file_manager.py +52 -0
  5. pyopenapi_gen/context/import_collector.py +382 -0
  6. pyopenapi_gen/context/render_context.py +630 -0
  7. pyopenapi_gen/core/__init__.py +0 -0
  8. pyopenapi_gen/core/auth/base.py +22 -0
  9. pyopenapi_gen/core/auth/plugins.py +89 -0
  10. pyopenapi_gen/core/exceptions.py +25 -0
  11. pyopenapi_gen/core/http_transport.py +219 -0
  12. pyopenapi_gen/core/loader/__init__.py +12 -0
  13. pyopenapi_gen/core/loader/loader.py +158 -0
  14. pyopenapi_gen/core/loader/operations/__init__.py +12 -0
  15. pyopenapi_gen/core/loader/operations/parser.py +155 -0
  16. pyopenapi_gen/core/loader/operations/post_processor.py +60 -0
  17. pyopenapi_gen/core/loader/operations/request_body.py +85 -0
  18. pyopenapi_gen/core/loader/parameters/__init__.py +10 -0
  19. pyopenapi_gen/core/loader/parameters/parser.py +121 -0
  20. pyopenapi_gen/core/loader/responses/__init__.py +10 -0
  21. pyopenapi_gen/core/loader/responses/parser.py +104 -0
  22. pyopenapi_gen/core/loader/schemas/__init__.py +11 -0
  23. pyopenapi_gen/core/loader/schemas/extractor.py +184 -0
  24. pyopenapi_gen/core/pagination.py +64 -0
  25. pyopenapi_gen/core/parsing/__init__.py +13 -0
  26. pyopenapi_gen/core/parsing/common/__init__.py +1 -0
  27. pyopenapi_gen/core/parsing/common/ref_resolution/__init__.py +9 -0
  28. pyopenapi_gen/core/parsing/common/ref_resolution/helpers/__init__.py +0 -0
  29. pyopenapi_gen/core/parsing/common/ref_resolution/helpers/cyclic_properties.py +66 -0
  30. pyopenapi_gen/core/parsing/common/ref_resolution/helpers/direct_cycle.py +33 -0
  31. pyopenapi_gen/core/parsing/common/ref_resolution/helpers/existing_schema.py +22 -0
  32. pyopenapi_gen/core/parsing/common/ref_resolution/helpers/list_response.py +54 -0
  33. pyopenapi_gen/core/parsing/common/ref_resolution/helpers/missing_ref.py +52 -0
  34. pyopenapi_gen/core/parsing/common/ref_resolution/helpers/new_schema.py +50 -0
  35. pyopenapi_gen/core/parsing/common/ref_resolution/helpers/stripped_suffix.py +51 -0
  36. pyopenapi_gen/core/parsing/common/ref_resolution/resolve_schema_ref.py +86 -0
  37. pyopenapi_gen/core/parsing/common/type_parser.py +74 -0
  38. pyopenapi_gen/core/parsing/context.py +184 -0
  39. pyopenapi_gen/core/parsing/cycle_helpers.py +123 -0
  40. pyopenapi_gen/core/parsing/keywords/__init__.py +1 -0
  41. pyopenapi_gen/core/parsing/keywords/all_of_parser.py +77 -0
  42. pyopenapi_gen/core/parsing/keywords/any_of_parser.py +79 -0
  43. pyopenapi_gen/core/parsing/keywords/array_items_parser.py +69 -0
  44. pyopenapi_gen/core/parsing/keywords/one_of_parser.py +72 -0
  45. pyopenapi_gen/core/parsing/keywords/properties_parser.py +98 -0
  46. pyopenapi_gen/core/parsing/schema_finalizer.py +166 -0
  47. pyopenapi_gen/core/parsing/schema_parser.py +610 -0
  48. pyopenapi_gen/core/parsing/transformers/__init__.py +0 -0
  49. pyopenapi_gen/core/parsing/transformers/inline_enum_extractor.py +285 -0
  50. pyopenapi_gen/core/parsing/transformers/inline_object_promoter.py +117 -0
  51. pyopenapi_gen/core/parsing/unified_cycle_detection.py +293 -0
  52. pyopenapi_gen/core/postprocess_manager.py +161 -0
  53. pyopenapi_gen/core/schemas.py +40 -0
  54. pyopenapi_gen/core/streaming_helpers.py +86 -0
  55. pyopenapi_gen/core/telemetry.py +67 -0
  56. pyopenapi_gen/core/utils.py +409 -0
  57. pyopenapi_gen/core/warning_collector.py +83 -0
  58. pyopenapi_gen/core/writers/code_writer.py +135 -0
  59. pyopenapi_gen/core/writers/documentation_writer.py +222 -0
  60. pyopenapi_gen/core/writers/line_writer.py +217 -0
  61. pyopenapi_gen/core/writers/python_construct_renderer.py +274 -0
  62. pyopenapi_gen/core_package_template/README.md +21 -0
  63. pyopenapi_gen/emit/models_emitter.py +143 -0
  64. pyopenapi_gen/emitters/client_emitter.py +51 -0
  65. pyopenapi_gen/emitters/core_emitter.py +181 -0
  66. pyopenapi_gen/emitters/docs_emitter.py +44 -0
  67. pyopenapi_gen/emitters/endpoints_emitter.py +223 -0
  68. pyopenapi_gen/emitters/exceptions_emitter.py +52 -0
  69. pyopenapi_gen/emitters/models_emitter.py +428 -0
  70. pyopenapi_gen/generator/client_generator.py +562 -0
  71. pyopenapi_gen/helpers/__init__.py +1 -0
  72. pyopenapi_gen/helpers/endpoint_utils.py +552 -0
  73. pyopenapi_gen/helpers/type_cleaner.py +341 -0
  74. pyopenapi_gen/helpers/type_helper.py +112 -0
  75. pyopenapi_gen/helpers/type_resolution/__init__.py +1 -0
  76. pyopenapi_gen/helpers/type_resolution/array_resolver.py +57 -0
  77. pyopenapi_gen/helpers/type_resolution/composition_resolver.py +79 -0
  78. pyopenapi_gen/helpers/type_resolution/finalizer.py +89 -0
  79. pyopenapi_gen/helpers/type_resolution/named_resolver.py +174 -0
  80. pyopenapi_gen/helpers/type_resolution/object_resolver.py +212 -0
  81. pyopenapi_gen/helpers/type_resolution/primitive_resolver.py +57 -0
  82. pyopenapi_gen/helpers/type_resolution/resolver.py +48 -0
  83. pyopenapi_gen/helpers/url_utils.py +14 -0
  84. pyopenapi_gen/http_types.py +20 -0
  85. pyopenapi_gen/ir.py +167 -0
  86. pyopenapi_gen/py.typed +1 -0
  87. pyopenapi_gen/types/__init__.py +11 -0
  88. pyopenapi_gen/types/contracts/__init__.py +13 -0
  89. pyopenapi_gen/types/contracts/protocols.py +106 -0
  90. pyopenapi_gen/types/contracts/types.py +30 -0
  91. pyopenapi_gen/types/resolvers/__init__.py +7 -0
  92. pyopenapi_gen/types/resolvers/reference_resolver.py +71 -0
  93. pyopenapi_gen/types/resolvers/response_resolver.py +203 -0
  94. pyopenapi_gen/types/resolvers/schema_resolver.py +367 -0
  95. pyopenapi_gen/types/services/__init__.py +5 -0
  96. pyopenapi_gen/types/services/type_service.py +133 -0
  97. pyopenapi_gen/visit/client_visitor.py +228 -0
  98. pyopenapi_gen/visit/docs_visitor.py +38 -0
  99. pyopenapi_gen/visit/endpoint/__init__.py +1 -0
  100. pyopenapi_gen/visit/endpoint/endpoint_visitor.py +103 -0
  101. pyopenapi_gen/visit/endpoint/generators/__init__.py +1 -0
  102. pyopenapi_gen/visit/endpoint/generators/docstring_generator.py +121 -0
  103. pyopenapi_gen/visit/endpoint/generators/endpoint_method_generator.py +87 -0
  104. pyopenapi_gen/visit/endpoint/generators/request_generator.py +103 -0
  105. pyopenapi_gen/visit/endpoint/generators/response_handler_generator.py +497 -0
  106. pyopenapi_gen/visit/endpoint/generators/signature_generator.py +88 -0
  107. pyopenapi_gen/visit/endpoint/generators/url_args_generator.py +183 -0
  108. pyopenapi_gen/visit/endpoint/processors/__init__.py +1 -0
  109. pyopenapi_gen/visit/endpoint/processors/import_analyzer.py +76 -0
  110. pyopenapi_gen/visit/endpoint/processors/parameter_processor.py +171 -0
  111. pyopenapi_gen/visit/exception_visitor.py +52 -0
  112. pyopenapi_gen/visit/model/__init__.py +0 -0
  113. pyopenapi_gen/visit/model/alias_generator.py +89 -0
  114. pyopenapi_gen/visit/model/dataclass_generator.py +197 -0
  115. pyopenapi_gen/visit/model/enum_generator.py +200 -0
  116. pyopenapi_gen/visit/model/model_visitor.py +197 -0
  117. pyopenapi_gen/visit/visitor.py +97 -0
  118. pyopenapi_gen-0.8.3.dist-info/METADATA +224 -0
  119. pyopenapi_gen-0.8.3.dist-info/RECORD +122 -0
  120. pyopenapi_gen-0.8.3.dist-info/WHEEL +4 -0
  121. pyopenapi_gen-0.8.3.dist-info/entry_points.txt +2 -0
  122. pyopenapi_gen-0.8.3.dist-info/licenses/LICENSE +21 -0
@@ -0,0 +1,293 @@
1
+ """
2
+ Unified cycle detection system for schema parsing.
3
+
4
+ This module provides a comprehensive, conflict-free approach to cycle detection
5
+ that handles structural cycles, processing cycles, and depth limits consistently.
6
+ """
7
+
8
+ from __future__ import annotations
9
+
10
+ import logging
11
+ from dataclasses import dataclass, field
12
+ from enum import Enum
13
+ from typing import Dict, List, Optional, Set
14
+
15
+ from pyopenapi_gen import IRSchema
16
+ from pyopenapi_gen.core.utils import NameSanitizer
17
+
18
+ logger = logging.getLogger(__name__)
19
+
20
+
21
class SchemaState(Enum):
    """Lifecycle states a schema moves through while being parsed."""

    NOT_STARTED = "not_started"  # never visited yet
    IN_PROGRESS = "in_progress"  # currently on the parse stack
    COMPLETED = "completed"  # fully parsed; safe to reuse
    PLACEHOLDER_CYCLE = "placeholder_cycle"  # replaced by a circular-reference placeholder
    PLACEHOLDER_DEPTH = "placeholder_depth"  # replaced after the max-depth cutoff
    PLACEHOLDER_SELF_REF = "placeholder_self_ref"  # replaced by an allowed self-reference stub
30
+
31
+
32
class CycleType(Enum):
    """Categories of cycle the detector can report."""

    STRUCTURAL = "structural"  # a chain of references loops back on itself
    SELF_REFERENCE = "self_reference"  # a schema refers directly to itself
    MAX_DEPTH = "max_depth"  # the recursion depth limit was exceeded
38
+
39
+
40
class CycleAction(Enum):
    """What the caller should do after a cycle-detection check."""

    CONTINUE_PARSING = "continue"  # no cycle (or an allowed one): keep parsing
    RETURN_PLACEHOLDER = "placeholder"  # hand back an already-made placeholder
    CREATE_PLACEHOLDER = "create"  # a fresh placeholder was just created
    RETURN_EXISTING = "existing"  # schema already parsed: reuse it
47
+
48
+
49
@dataclass
class CycleInfo:
    """Describes one detected cycle: where it was found and what shape it has."""

    schema_name: str  # schema at which the cycle was detected
    cycle_path: List[str]  # reference chain, closed by repeating schema_name
    cycle_type: CycleType  # structural loop vs. direct self-reference
    is_direct_self_reference: bool  # True when the schema references itself directly
    depth_when_detected: int  # parse-stack depth at detection time
58
+
59
+
60
@dataclass
class CycleDetectionResult:
    """Outcome of a single cycle-detection check."""

    is_cycle: bool  # whether any cycle/limit condition was hit
    cycle_type: Optional[CycleType]  # kind of cycle, when one was analyzed
    action: CycleAction  # what the caller should do next
    cycle_info: Optional[CycleInfo] = None  # details of the detected cycle, if any
    placeholder_schema: Optional[IRSchema] = None  # placeholder to use, when one was created
69
+
70
+
71
@dataclass
class UnifiedCycleContext:
    """Single shared state bag used by every cycle-detection mechanism."""

    # Core tracking
    schema_stack: List[str] = field(default_factory=list)  # schemas currently being parsed, in order
    schema_states: Dict[str, SchemaState] = field(default_factory=dict)  # per-schema lifecycle state
    parsed_schemas: Dict[str, IRSchema] = field(default_factory=dict)  # completed schemas and placeholders
    recursion_depth: int = 0  # current enter/exit nesting level

    # Detection results
    detected_cycles: List[CycleInfo] = field(default_factory=list)  # every disallowed cycle seen so far
    depth_exceeded_schemas: Set[str] = field(default_factory=set)  # schemas cut off by the depth limit
    cycle_detected: bool = False  # global flag kept for backward compatibility

    # Configuration
    max_depth: int = 150  # default recursion cutoff (env var may override at check time)
    allow_self_reference: bool = False  # permit a schema to reference itself directly
89
+
90
+
91
def analyze_cycle(schema_name: str, schema_stack: List[str]) -> CycleInfo:
    """Build a CycleInfo describing the cycle that closes at *schema_name*.

    The cycle path is the slice of the stack from the first occurrence of the
    name through the top, closed by appending the name once more.
    """
    if schema_name in schema_stack:
        first = schema_stack.index(schema_name)
        path = schema_stack[first:] + [schema_name]
    else:
        # Defensive: the caller claimed a cycle but the name is not on the
        # stack; fall back to treating it as a trivial self-loop.
        path = [schema_name, schema_name]

    direct = len(path) == 2 and path[0] == path[1]

    return CycleInfo(
        schema_name=schema_name,
        cycle_path=path,
        cycle_type=CycleType.SELF_REFERENCE if direct else CycleType.STRUCTURAL,
        is_direct_self_reference=direct,
        depth_when_detected=len(schema_stack),
    )
111
+
112
+
113
def create_cycle_placeholder(schema_name: str, cycle_info: CycleInfo) -> IRSchema:
    """Build the stand-in IRSchema used where a circular reference was cut."""
    path_text = " -> ".join(cycle_info.cycle_path)

    return IRSchema(
        name=NameSanitizer.sanitize_class_name(schema_name),
        type="object",
        description=f"[Circular reference detected: {path_text}]",
        _from_unresolved_ref=True,
        _circular_ref_path=path_text,
        _is_circular_ref=True,
    )
126
+
127
+
128
def create_self_ref_placeholder(schema_name: str, cycle_info: CycleInfo) -> IRSchema:
    """Build the stub IRSchema used for an allowed direct self-reference.

    Note: *cycle_info* is accepted for signature parity with the other
    placeholder factories but is not consulted here.
    """
    return IRSchema(
        name=NameSanitizer.sanitize_class_name(schema_name),
        type="object",
        description=f"[Self-referencing schema: {schema_name}]",
        _is_self_referential_stub=True,
    )
138
+
139
+
140
def create_depth_placeholder(schema_name: str, depth: int) -> IRSchema:
    """Build the stand-in IRSchema used once the recursion depth limit is hit."""
    message = f"[Maximum recursion depth ({depth}) exceeded for '{schema_name}']"

    # Warn via cycle_helpers' logger so depth warnings surface under the same
    # logger name as the rest of the cycle-handling machinery.
    from .cycle_helpers import logger as cycle_helpers_logger

    cycle_helpers_logger.warning(message)

    return IRSchema(
        name=NameSanitizer.sanitize_class_name(schema_name),
        type="object",
        description=message,
        _max_depth_exceeded_marker=True,
    )
156
+
157
+
158
def unified_cycle_check(schema_name: Optional[str], context: UnifiedCycleContext) -> CycleDetectionResult:
    """Run every cycle-detection rule for *schema_name* and report what to do.

    Checks run in a fixed order: completed schemas are reused, existing
    placeholders are returned, the depth limit is enforced, and only then is
    the parse stack searched for a structural cycle.

    Args:
        schema_name: Name of the schema about to be parsed; None for
            anonymous/inline schemas, which cannot participate in named cycles.
        context: Shared cycle-detection state, mutated in place.

    Returns:
        A CycleDetectionResult telling the caller whether a cycle was found
        and which action to take (continue, reuse, or use a placeholder).
    """
    if schema_name is None:
        return CycleDetectionResult(False, None, CycleAction.CONTINUE_PARSING)

    current_state = context.schema_states.get(schema_name, SchemaState.NOT_STARTED)

    # 1. Already fully parsed: reuse it, no cycle involved.
    if current_state == SchemaState.COMPLETED:
        return CycleDetectionResult(False, None, CycleAction.RETURN_EXISTING)

    # 2. Already replaced by a placeholder: hand that placeholder back.
    if current_state in [
        SchemaState.PLACEHOLDER_CYCLE,
        SchemaState.PLACEHOLDER_DEPTH,
        SchemaState.PLACEHOLDER_SELF_REF,
    ]:
        return CycleDetectionResult(True, None, CycleAction.RETURN_PLACEHOLDER)

    # 3. Enforce the depth limit BEFORE cycle checks. The limit is re-read
    #    from the environment on every call so it can be tuned dynamically.
    import os

    max_depth = int(os.environ.get("PYOPENAPI_MAX_DEPTH", context.max_depth))
    if context.recursion_depth > max_depth:
        context.depth_exceeded_schemas.add(schema_name)
        context.schema_states[schema_name] = SchemaState.PLACEHOLDER_DEPTH
        context.cycle_detected = True  # depth exhaustion is treated as a form of cycle detection
        placeholder = create_depth_placeholder(schema_name, max_depth)
        context.parsed_schemas[schema_name] = placeholder
        return CycleDetectionResult(
            True, CycleType.MAX_DEPTH, CycleAction.CREATE_PLACEHOLDER, placeholder_schema=placeholder
        )

    # 4. Structural cycle: the schema is already somewhere on the parse stack.
    if schema_name in context.schema_stack:
        cycle_info = analyze_cycle(schema_name, context.schema_stack)
        context.cycle_detected = True

        # A placeholder is created for this re-entrant reference only; the
        # original schema higher up the stack keeps parsing normally.
        if context.allow_self_reference and cycle_info.is_direct_self_reference:
            placeholder = create_self_ref_placeholder(schema_name, cycle_info)
        else:
            context.detected_cycles.append(cycle_info)
            placeholder = create_cycle_placeholder(schema_name, cycle_info)

        # Storage policy: only certain cycles overwrite the schema's entry in
        # parsed_schemas. Synthetic schemas (array-item "...Item" names and
        # promoted "...Property" names) always do.
        # (schema_name is known non-None here, so no truthiness guard needed.)
        is_synthetic_schema = "Item" in schema_name or "Property" in schema_name

        cycle_path_str = " -> ".join(cycle_info.cycle_path)
        # Known pattern: an array field ("Children") whose item schema loops
        # back to the schema that declared it.
        is_direct_array_self_ref = (
            "Children" in cycle_path_str
            and "ChildrenItem" in cycle_path_str
            and cycle_info.cycle_path[0] == cycle_info.cycle_path[-1]
        )
        # Known pattern: a promoted nested property (name prefixed with the
        # parent schema's name, but not an "...Item") looping back to its parent.
        is_nested_property_self_ref = (
            any(
                name.startswith(schema_name) and name != schema_name and not name.endswith("Item")
                for name in cycle_info.cycle_path
            )
            and cycle_info.cycle_path[0] == cycle_info.cycle_path[-1]
        )

        should_store_placeholder = (
            is_synthetic_schema
            or cycle_info.is_direct_self_reference
            or is_direct_array_self_ref
            or is_nested_property_self_ref
        )

        if should_store_placeholder:
            context.parsed_schemas[schema_name] = placeholder
            if context.allow_self_reference and cycle_info.is_direct_self_reference:
                context.schema_states[schema_name] = SchemaState.PLACEHOLDER_SELF_REF
            else:
                context.schema_states[schema_name] = SchemaState.PLACEHOLDER_CYCLE

        # The original schema is not marked as a placeholder; only this
        # re-entrant reference receives one.
        return CycleDetectionResult(
            True,
            (
                cycle_info.cycle_type
                if not (context.allow_self_reference and cycle_info.is_direct_self_reference)
                else CycleType.SELF_REFERENCE
            ),
            CycleAction.CREATE_PLACEHOLDER,
            cycle_info=cycle_info,
            placeholder_schema=placeholder,
        )

    # 5. No cycle detected: mark in-progress and let the caller parse.
    context.schema_states[schema_name] = SchemaState.IN_PROGRESS
    return CycleDetectionResult(False, None, CycleAction.CONTINUE_PARSING)
263
+
264
+
265
def unified_enter_schema(schema_name: Optional[str], context: UnifiedCycleContext) -> CycleDetectionResult:
    """Record entry into a schema and run the unified cycle check.

    The recursion depth is always incremented (paired with
    unified_exit_schema); the name is pushed onto the stack only when the
    check says parsing should continue.
    """
    context.recursion_depth += 1

    result = unified_cycle_check(schema_name, context)

    if schema_name and result.action == CycleAction.CONTINUE_PARSING:
        context.schema_stack.append(schema_name)

    return result
276
+
277
+
278
def unified_exit_schema(schema_name: Optional[str], context: UnifiedCycleContext) -> None:
    """Undo unified_enter_schema: drop depth and stack entry, finalize state."""
    # Clamp at zero so unbalanced exits never drive the counter negative.
    context.recursion_depth = max(0, context.recursion_depth - 1)

    if schema_name:
        if schema_name in context.schema_stack:
            context.schema_stack.remove(schema_name)
        # Promote IN_PROGRESS to COMPLETED; placeholder states are left alone.
        if context.schema_states.get(schema_name) == SchemaState.IN_PROGRESS:
            context.schema_states[schema_name] = SchemaState.COMPLETED
289
+
290
+
291
def get_schema_or_placeholder(schema_name: str, context: UnifiedCycleContext) -> Optional[IRSchema]:
    """Return the parsed schema or placeholder for *schema_name*, or None."""
    try:
        return context.parsed_schemas[schema_name]
    except KeyError:
        return None
@@ -0,0 +1,161 @@
1
+ import subprocess
2
+ import sys
3
+ from pathlib import Path
4
+ from typing import List, Union
5
+
6
+ SUCCESS_LINE = "Success: no issues found in 1 source file"
7
+
8
+
9
+ def _print_filtered_stdout(stdout: str) -> None:
10
+ lines = [line for line in stdout.splitlines() if line.strip() and line.strip() != SUCCESS_LINE]
11
+ if lines:
12
+ print("\n".join(lines))
13
+
14
+
15
class PostprocessManager:
    """
    Handles post-processing of generated Python files: import cleanup, formatting, and type checking.
    Can be used programmatically or as a script.
    """

    def __init__(self, project_root: str):
        """
        Args:
            project_root: Directory that bounds the upward search for package
                roots when locating what to type-check.
        """
        # Removed a dead `pass` statement that followed this assignment.
        self.project_root = project_root

    def run(self, targets: List[Union[str, Path]]) -> None:
        """
        Run Ruff checks on individual files, then run Mypy on the package root(s).

        Args:
            targets: Files and/or directories to post-process. An empty list
                is a no-op.
        """
        if not targets:
            return

        # Normalize all targets to Path objects.
        target_paths = [Path(t) for t in targets]

        # Per-file Ruff passes: unused-import removal, import sorting, formatting.
        for target_path in target_paths:
            if target_path.is_file():
                self.remove_unused_imports(target_path)
                self.sort_imports(target_path)
                self.format_code(target_path)

        # Determine the package root directory(s) for Mypy: for each file,
        # walk upward while __init__.py exists (stopping at project_root) so
        # Mypy sees the whole package rather than a lone file.
        package_roots = set()
        for target_path in target_paths:
            if target_path.is_file():
                current = target_path.parent
                package_root = current
                while current != Path(self.project_root) and (current / "__init__.py").exists():
                    package_root = current
                    current = current.parent
                package_roots.add(package_root)
            elif target_path.is_dir():
                # A directory target is assumed to be a package root itself.
                package_roots.add(target_path)

        # Run Mypy on each identified package root. type_check prints its own
        # per-directory progress line (the duplicate print here was removed).
        if package_roots:
            print(f"Running Mypy on package root(s): {package_roots}")
            for root_dir in package_roots:
                self.type_check(root_dir)

    def _run_ruff(self, ruff_args: List[str], target: Union[str, Path]) -> "subprocess.CompletedProcess[str]":
        """Run a Ruff subcommand on *target*, reporting any output or errors.

        Shared by the three public Ruff wrappers below, which previously
        duplicated this subprocess/reporting logic verbatim.
        """
        result = subprocess.run(
            [sys.executable, "-m", "ruff", *ruff_args, str(target)],
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            text=True,
        )
        if result.returncode != 0 or result.stderr:
            if result.stdout:
                _print_filtered_stdout(result.stdout)
            if result.stderr:
                print(result.stderr, file=sys.stderr)
        return result

    def remove_unused_imports(self, target: Union[str, Path]) -> None:
        """Remove unused imports from the target using Ruff (rule F401)."""
        self._run_ruff(["check", "--select=F401", "--fix"], target)

    def sort_imports(self, target: Union[str, Path]) -> None:
        """Sort imports in the target using Ruff (rule group I)."""
        self._run_ruff(["check", "--select=I", "--fix"], target)

    def format_code(self, target: Union[str, Path]) -> None:
        """Format code in the target using Ruff."""
        result = self._run_ruff(["format"], target)
        if result.returncode != 0 or result.stderr:
            print(f"Formatting found and fixed issues in {target}.", file=sys.stderr)

    def type_check(self, target_dir: Path) -> None:
        """Type check the target directory using mypy --strict.

        Exits the process with mypy's return code when type checking fails.
        """
        if not target_dir.is_dir():
            print(f"Skipping Mypy on non-directory: {target_dir}", file=sys.stderr)
            return

        print(f"Running mypy on {target_dir}...")
        result = subprocess.run(
            [sys.executable, "-m", "mypy", str(target_dir), "--strict"],
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            text=True,
        )
        if result.stdout or result.stderr or result.returncode != 0:
            if result.stdout:
                print(result.stdout)
            if result.stderr:
                print(result.stderr, file=sys.stderr)
            if result.returncode != 0:
                print(f"Type checking failed for {target_dir}. Please fix the above issues.", file=sys.stderr)
                sys.exit(result.returncode)
153
+
154
+
155
if __name__ == "__main__":
    import argparse

    parser = argparse.ArgumentParser(description="Postprocess generated Python files/directories.")
    parser.add_argument("targets", nargs="+", help="Files or directories to postprocess.")
    # Bug fix: the original read args.project_root without ever defining the
    # argument, so every script invocation raised AttributeError. Default to
    # the current directory, which bounds the package-root search.
    parser.add_argument(
        "--project-root",
        dest="project_root",
        default=".",
        help="Project root used to bound package-root discovery (default: current directory).",
    )
    args = parser.parse_args()
    PostprocessManager(args.project_root).run(args.targets)
@@ -0,0 +1,40 @@
1
+ from dataclasses import MISSING, dataclass, fields
2
+ from typing import Any, Dict, Type, TypeVar
3
+
4
T = TypeVar("T")


@dataclass
class BaseSchema:
    """Base class for generated dataclass models, providing dict validation and conversion.

    Offers a minimal, Pydantic-like interface (model_validate / model_dump)
    on plain dataclasses, without a Pydantic dependency. The original
    docstring called these "Pydantic models", which was inaccurate.
    """

    @classmethod
    def model_validate(cls: Type[T], data: Dict[str, Any]) -> T:
        """Validate and create an instance from a dictionary, akin to Pydantic's model_validate.

        Args:
            data: Mapping of field names to values. Keys that are not
                declared fields are ignored.

        Raises:
            TypeError: If *data* is not a dictionary.
            ValueError: If a required field (no default and no
                default_factory) is missing from *data*.
        """
        if not isinstance(data, dict):
            raise TypeError(f"Input must be a dictionary, got {type(data).__name__}")

        kwargs: Dict[str, Any] = {}
        cls_fields = {f.name: f for f in fields(cls)}  # type: ignore[arg-type]

        for field_name, field_def in cls_fields.items():
            if field_name in data:
                kwargs[field_name] = data[field_name]
            elif field_def.default is MISSING and field_def.default_factory is MISSING:
                raise ValueError(f"Missing required field: '{field_name}' for class {cls.__name__}")

        # Extra keys in *data* are deliberately ignored (the original computed
        # them into a dead local and discarded it; behavior is unchanged).
        return cls(**kwargs)

    def model_dump(self, exclude_none: bool = False) -> Dict[str, Any]:
        """Convert the model instance to a dictionary, akin to Pydantic's model_dump.

        Args:
            exclude_none: When True, fields whose value is None are omitted.
        """
        result: Dict[str, Any] = {}
        for field_def in fields(self):
            value = getattr(self, field_def.name)
            if exclude_none and value is None:
                continue
            result[field_def.name] = value
        return result
@@ -0,0 +1,86 @@
1
+ import json
2
+ from typing import Any, AsyncIterator, List, Optional
3
+
4
+ import httpx
5
+
6
+
7
class SSEEvent:
    """One parsed Server-Sent Event: data payload plus optional metadata."""

    def __init__(
        self, data: str, event: Optional[str] = None, id: Optional[str] = None, retry: Optional[int] = None
    ) -> None:
        self.data: str = data
        self.event: Optional[str] = event
        self.id: Optional[str] = id
        self.retry: Optional[int] = retry

    def __repr__(self) -> str:
        rendered = ", ".join(f"{name}={getattr(self, name)!r}" for name in ("data", "event", "id", "retry"))
        return f"SSEEvent({rendered})"
18
+
19
+
20
async def iter_bytes(response: httpx.Response) -> AsyncIterator[bytes]:
    """Yield the raw byte chunks of *response* as they arrive."""
    async for raw_chunk in response.aiter_bytes():
        yield raw_chunk
23
+
24
+
25
async def iter_ndjson(response: httpx.Response) -> AsyncIterator[Any]:
    """Yield one parsed JSON value per non-blank line of the response (NDJSON)."""
    async for raw_line in response.aiter_lines():
        payload = raw_line.strip()
        if payload:
            yield json.loads(payload)
30
+
31
+
32
async def iter_sse(response: httpx.Response) -> AsyncIterator[SSEEvent]:
    """Parse Server-Sent Events (SSE) from a streaming response.

    Events are separated by blank lines; a trailing event that is not
    followed by a final blank line is still emitted.
    """
    pending: list[str] = []  # lines of the event currently being accumulated
    async for line in response.aiter_lines():
        if line:
            pending.append(line)
        elif pending:
            # Blank line terminates the current event.
            parsed = _parse_sse_event(pending)
            if parsed:
                yield parsed
            pending = []
    # Flush a trailing event that had no terminating blank line.
    if pending:
        parsed = _parse_sse_event(pending)
        if parsed:
            yield parsed
50
+
51
+
52
def _parse_sse_event(lines: List[str]) -> SSEEvent:
    """Assemble one SSEEvent from the raw lines of a single event block.

    Follows the SSE wire format: ':'-prefixed lines are comments, multiple
    'data' lines accumulate (joined with newlines), and non-integer 'retry'
    values are ignored. Fix over the original: a line containing no colon is
    a field name with an empty value per the SSE specification, rather than
    being silently dropped.

    Note: values are stripped of ALL leading whitespace (lstrip), whereas the
    spec removes at most one leading space — kept as-is for compatibility.
    """
    data: List[str] = []
    event: Optional[str] = None
    event_id: Optional[str] = None  # renamed from `id` to avoid shadowing the builtin
    retry: Optional[int] = None
    for line in lines:
        if line.startswith(":"):
            continue  # comment line
        if ":" in line:
            field, value = line.split(":", 1)
            value = value.lstrip()
        else:
            # SSE spec: a colon-less line is a field name with an empty value.
            field, value = line, ""
        if field == "data":
            data.append(value)
        elif field == "event":
            event = value
        elif field == "id":
            event_id = value
        elif field == "retry":
            try:
                retry = int(value)
            except ValueError:
                pass  # ignore non-integer retry values
    return SSEEvent(data="\n".join(data), event=event, id=event_id, retry=retry)
75
+
76
+
77
async def iter_sse_events_text(response: httpx.Response) -> AsyncIterator[str]:
    """
    Parses a Server-Sent Events (SSE) stream and yields the `data` field content
    as a string for each event.

    This is specifically for cases where the event data is expected to be a
    single text payload (e.g., a JSON string) per event. Events whose data is
    empty are skipped.
    """
    async for parsed_event in iter_sse(response):
        if parsed_event.data:
            yield parsed_event.data
@@ -0,0 +1,67 @@
1
+ """
2
+ Telemetry client for usage tracking and analytics.
3
+
4
+ This module provides the TelemetryClient class, which handles anonymous
5
+ usage telemetry for PyOpenAPI Generator. Telemetry is opt-in only.
6
+ """
7
+
8
+ import json
9
+ import os
10
+ import time
11
+ from typing import Any, Dict, Optional
12
+
13
+
14
class TelemetryClient:
    """
    Client for sending opt-in telemetry events.

    Emits anonymous usage events so maintainers can understand how the
    generator is used. Telemetry is disabled by default: it must be turned on
    explicitly, either via the PYOPENAPI_TELEMETRY_ENABLED environment
    variable or by passing enabled=True to the constructor.

    Attributes:
        enabled: Whether telemetry is currently enabled
    """

    def __init__(self, enabled: Optional[bool] = None) -> None:
        """
        Initialize a new TelemetryClient.

        Args:
            enabled: Explicit on/off switch. When None, the
                PYOPENAPI_TELEMETRY_ENABLED environment variable decides
                (accepted truthy values: "1", "true", "yes", case-insensitive).
        """
        if enabled is not None:
            self.enabled = enabled
        else:
            flag = os.getenv("PYOPENAPI_TELEMETRY_ENABLED", "false")
            self.enabled = flag.lower() in ("1", "true", "yes")

    def track_event(self, event: str, properties: Optional[Dict[str, Any]] = None) -> None:
        """
        Track a telemetry event if telemetry is enabled.

        Events are silently dropped when telemetry is disabled, and any
        transport failure is swallowed so telemetry can never break the
        main program.

        Args:
            event: The name of the event to track
            properties: Optional dictionary of additional properties to include
        """
        if not self.enabled:
            return

        payload: Dict[str, Any] = {
            "event": event,
            "properties": properties or {},
            "timestamp": time.time(),
        }

        try:
            # Stub transport: a real deployment would ship this to a collector.
            print("TELEMETRY", json.dumps(payload))
        except Exception:
            # Telemetry errors must never affect the main execution path.
            pass