pyopenapi-gen 2.7.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pyopenapi_gen/__init__.py +224 -0
- pyopenapi_gen/__main__.py +6 -0
- pyopenapi_gen/cli.py +62 -0
- pyopenapi_gen/context/CLAUDE.md +284 -0
- pyopenapi_gen/context/file_manager.py +52 -0
- pyopenapi_gen/context/import_collector.py +382 -0
- pyopenapi_gen/context/render_context.py +726 -0
- pyopenapi_gen/core/CLAUDE.md +224 -0
- pyopenapi_gen/core/__init__.py +0 -0
- pyopenapi_gen/core/auth/base.py +22 -0
- pyopenapi_gen/core/auth/plugins.py +89 -0
- pyopenapi_gen/core/cattrs_converter.py +810 -0
- pyopenapi_gen/core/exceptions.py +20 -0
- pyopenapi_gen/core/http_status_codes.py +218 -0
- pyopenapi_gen/core/http_transport.py +222 -0
- pyopenapi_gen/core/loader/__init__.py +12 -0
- pyopenapi_gen/core/loader/loader.py +174 -0
- pyopenapi_gen/core/loader/operations/__init__.py +12 -0
- pyopenapi_gen/core/loader/operations/parser.py +161 -0
- pyopenapi_gen/core/loader/operations/post_processor.py +62 -0
- pyopenapi_gen/core/loader/operations/request_body.py +90 -0
- pyopenapi_gen/core/loader/parameters/__init__.py +10 -0
- pyopenapi_gen/core/loader/parameters/parser.py +186 -0
- pyopenapi_gen/core/loader/responses/__init__.py +10 -0
- pyopenapi_gen/core/loader/responses/parser.py +111 -0
- pyopenapi_gen/core/loader/schemas/__init__.py +11 -0
- pyopenapi_gen/core/loader/schemas/extractor.py +275 -0
- pyopenapi_gen/core/pagination.py +64 -0
- pyopenapi_gen/core/parsing/__init__.py +13 -0
- pyopenapi_gen/core/parsing/common/__init__.py +1 -0
- pyopenapi_gen/core/parsing/common/ref_resolution/__init__.py +9 -0
- pyopenapi_gen/core/parsing/common/ref_resolution/helpers/__init__.py +0 -0
- pyopenapi_gen/core/parsing/common/ref_resolution/helpers/cyclic_properties.py +66 -0
- pyopenapi_gen/core/parsing/common/ref_resolution/helpers/direct_cycle.py +33 -0
- pyopenapi_gen/core/parsing/common/ref_resolution/helpers/existing_schema.py +22 -0
- pyopenapi_gen/core/parsing/common/ref_resolution/helpers/list_response.py +54 -0
- pyopenapi_gen/core/parsing/common/ref_resolution/helpers/missing_ref.py +52 -0
- pyopenapi_gen/core/parsing/common/ref_resolution/helpers/new_schema.py +50 -0
- pyopenapi_gen/core/parsing/common/ref_resolution/helpers/stripped_suffix.py +51 -0
- pyopenapi_gen/core/parsing/common/ref_resolution/resolve_schema_ref.py +86 -0
- pyopenapi_gen/core/parsing/common/type_parser.py +73 -0
- pyopenapi_gen/core/parsing/context.py +187 -0
- pyopenapi_gen/core/parsing/cycle_helpers.py +126 -0
- pyopenapi_gen/core/parsing/keywords/__init__.py +1 -0
- pyopenapi_gen/core/parsing/keywords/all_of_parser.py +81 -0
- pyopenapi_gen/core/parsing/keywords/any_of_parser.py +84 -0
- pyopenapi_gen/core/parsing/keywords/array_items_parser.py +72 -0
- pyopenapi_gen/core/parsing/keywords/one_of_parser.py +77 -0
- pyopenapi_gen/core/parsing/keywords/properties_parser.py +98 -0
- pyopenapi_gen/core/parsing/schema_finalizer.py +169 -0
- pyopenapi_gen/core/parsing/schema_parser.py +804 -0
- pyopenapi_gen/core/parsing/transformers/__init__.py +0 -0
- pyopenapi_gen/core/parsing/transformers/inline_enum_extractor.py +285 -0
- pyopenapi_gen/core/parsing/transformers/inline_object_promoter.py +120 -0
- pyopenapi_gen/core/parsing/unified_cycle_detection.py +293 -0
- pyopenapi_gen/core/postprocess_manager.py +260 -0
- pyopenapi_gen/core/spec_fetcher.py +148 -0
- pyopenapi_gen/core/streaming_helpers.py +84 -0
- pyopenapi_gen/core/telemetry.py +69 -0
- pyopenapi_gen/core/utils.py +456 -0
- pyopenapi_gen/core/warning_collector.py +83 -0
- pyopenapi_gen/core/writers/code_writer.py +135 -0
- pyopenapi_gen/core/writers/documentation_writer.py +222 -0
- pyopenapi_gen/core/writers/line_writer.py +217 -0
- pyopenapi_gen/core/writers/python_construct_renderer.py +321 -0
- pyopenapi_gen/core_package_template/README.md +21 -0
- pyopenapi_gen/emit/models_emitter.py +143 -0
- pyopenapi_gen/emitters/CLAUDE.md +286 -0
- pyopenapi_gen/emitters/client_emitter.py +51 -0
- pyopenapi_gen/emitters/core_emitter.py +181 -0
- pyopenapi_gen/emitters/docs_emitter.py +44 -0
- pyopenapi_gen/emitters/endpoints_emitter.py +247 -0
- pyopenapi_gen/emitters/exceptions_emitter.py +187 -0
- pyopenapi_gen/emitters/mocks_emitter.py +185 -0
- pyopenapi_gen/emitters/models_emitter.py +426 -0
- pyopenapi_gen/generator/CLAUDE.md +352 -0
- pyopenapi_gen/generator/client_generator.py +567 -0
- pyopenapi_gen/generator/exceptions.py +7 -0
- pyopenapi_gen/helpers/CLAUDE.md +325 -0
- pyopenapi_gen/helpers/__init__.py +1 -0
- pyopenapi_gen/helpers/endpoint_utils.py +532 -0
- pyopenapi_gen/helpers/type_cleaner.py +334 -0
- pyopenapi_gen/helpers/type_helper.py +112 -0
- pyopenapi_gen/helpers/type_resolution/__init__.py +1 -0
- pyopenapi_gen/helpers/type_resolution/array_resolver.py +57 -0
- pyopenapi_gen/helpers/type_resolution/composition_resolver.py +79 -0
- pyopenapi_gen/helpers/type_resolution/finalizer.py +105 -0
- pyopenapi_gen/helpers/type_resolution/named_resolver.py +172 -0
- pyopenapi_gen/helpers/type_resolution/object_resolver.py +216 -0
- pyopenapi_gen/helpers/type_resolution/primitive_resolver.py +109 -0
- pyopenapi_gen/helpers/type_resolution/resolver.py +47 -0
- pyopenapi_gen/helpers/url_utils.py +14 -0
- pyopenapi_gen/http_types.py +20 -0
- pyopenapi_gen/ir.py +165 -0
- pyopenapi_gen/py.typed +1 -0
- pyopenapi_gen/types/CLAUDE.md +140 -0
- pyopenapi_gen/types/__init__.py +11 -0
- pyopenapi_gen/types/contracts/__init__.py +13 -0
- pyopenapi_gen/types/contracts/protocols.py +106 -0
- pyopenapi_gen/types/contracts/types.py +28 -0
- pyopenapi_gen/types/resolvers/__init__.py +7 -0
- pyopenapi_gen/types/resolvers/reference_resolver.py +71 -0
- pyopenapi_gen/types/resolvers/response_resolver.py +177 -0
- pyopenapi_gen/types/resolvers/schema_resolver.py +498 -0
- pyopenapi_gen/types/services/__init__.py +5 -0
- pyopenapi_gen/types/services/type_service.py +165 -0
- pyopenapi_gen/types/strategies/__init__.py +5 -0
- pyopenapi_gen/types/strategies/response_strategy.py +310 -0
- pyopenapi_gen/visit/CLAUDE.md +272 -0
- pyopenapi_gen/visit/client_visitor.py +477 -0
- pyopenapi_gen/visit/docs_visitor.py +38 -0
- pyopenapi_gen/visit/endpoint/__init__.py +1 -0
- pyopenapi_gen/visit/endpoint/endpoint_visitor.py +292 -0
- pyopenapi_gen/visit/endpoint/generators/__init__.py +1 -0
- pyopenapi_gen/visit/endpoint/generators/docstring_generator.py +123 -0
- pyopenapi_gen/visit/endpoint/generators/endpoint_method_generator.py +222 -0
- pyopenapi_gen/visit/endpoint/generators/mock_generator.py +140 -0
- pyopenapi_gen/visit/endpoint/generators/overload_generator.py +252 -0
- pyopenapi_gen/visit/endpoint/generators/request_generator.py +103 -0
- pyopenapi_gen/visit/endpoint/generators/response_handler_generator.py +705 -0
- pyopenapi_gen/visit/endpoint/generators/signature_generator.py +83 -0
- pyopenapi_gen/visit/endpoint/generators/url_args_generator.py +207 -0
- pyopenapi_gen/visit/endpoint/processors/__init__.py +1 -0
- pyopenapi_gen/visit/endpoint/processors/import_analyzer.py +78 -0
- pyopenapi_gen/visit/endpoint/processors/parameter_processor.py +171 -0
- pyopenapi_gen/visit/exception_visitor.py +90 -0
- pyopenapi_gen/visit/model/__init__.py +0 -0
- pyopenapi_gen/visit/model/alias_generator.py +93 -0
- pyopenapi_gen/visit/model/dataclass_generator.py +553 -0
- pyopenapi_gen/visit/model/enum_generator.py +212 -0
- pyopenapi_gen/visit/model/model_visitor.py +198 -0
- pyopenapi_gen/visit/visitor.py +97 -0
- pyopenapi_gen-2.7.2.dist-info/METADATA +1169 -0
- pyopenapi_gen-2.7.2.dist-info/RECORD +137 -0
- pyopenapi_gen-2.7.2.dist-info/WHEEL +4 -0
- pyopenapi_gen-2.7.2.dist-info/entry_points.txt +2 -0
- pyopenapi_gen-2.7.2.dist-info/licenses/LICENSE +21 -0
pyopenapi_gen/core/parsing/common/ref_resolution/helpers/stripped_suffix.py
@@ -0,0 +1,51 @@
+"""
+Module for handling stripped suffix fallback strategy.
+"""
+
+import logging
+from typing import Any, Callable, Mapping
+
+from pyopenapi_gen.ir import IRSchema
+
+from ....context import ParsingContext
+
+logger = logging.getLogger(__name__)
+
+
+def try_stripped_suffix_fallback(
+    ref_name: str,
+    ref_value: str,
+    context: ParsingContext,
+    max_depth: int,
+    parse_fn: Callable[[str | None, Mapping[str, Any] | None, ParsingContext, int], IRSchema],
+) -> IRSchema | None:
+    """
+    Attempts to resolve a reference by stripping common suffixes.
+
+    Contracts:
+        Pre-conditions:
+            - ref_name must be a valid schema name
+            - parse_fn must be a callable that parses schemas
+            - context must be a valid ParsingContext instance
+        Post-conditions:
+            - If successful, returns the resolved IRSchema
+            - If unsuccessful, returns None
+            - Successful resolutions are added to context.parsed_schemas
+    """
+    suffixes = ["Response", "Request", "Input", "Output"]
+
+    for suffix in suffixes:
+        if ref_name.endswith(suffix):
+            base_name = ref_name[: -len(suffix)]
+            referenced_node_data_fallback = context.raw_spec_schemas.get(base_name)
+
+            if referenced_node_data_fallback:
+                resolved_schema = parse_fn(base_name, referenced_node_data_fallback, context, max_depth)
+                if not resolved_schema._from_unresolved_ref:
+                    warning_msg = f"Resolved $ref: {ref_value} by falling back to base name '{base_name}'."
+                    context.collected_warnings.append(warning_msg)
+
+                context.parsed_schemas[ref_name] = resolved_schema
+                return resolved_schema
+
+    return None
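For orientation, a minimal usage sketch (an editorial addition, not part of the wheel contents): it resolves a dangling "UserResponse" $ref against a spec that only defines "User", substituting a stub for the real recursive parser. The IRSchema keyword arguments mirror those used elsewhere in this diff and are assumed, not confirmed, to be accepted by the constructor.

from pyopenapi_gen.ir import IRSchema
from pyopenapi_gen.core.parsing.context import ParsingContext
from pyopenapi_gen.core.parsing.common.ref_resolution.helpers.stripped_suffix import (
    try_stripped_suffix_fallback,
)

def stub_parse_fn(name, node, context, max_depth):
    # Stand-in for the real recursive schema parser (assumed IRSchema kwargs).
    return IRSchema(name=name, type="object", _from_unresolved_ref=False)

ctx = ParsingContext(raw_spec_schemas={"User": {"type": "object"}})
schema = try_stripped_suffix_fallback(
    "UserResponse", "#/components/schemas/UserResponse", ctx, 10, stub_parse_fn
)
# schema is the stub "User" IRSchema, a fallback warning is appended to
# ctx.collected_warnings, and ctx.parsed_schemas["UserResponse"] now points at it.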
pyopenapi_gen/core/parsing/common/ref_resolution/resolve_schema_ref.py
@@ -0,0 +1,86 @@
+"""
+Main module for schema reference resolution.
+"""
+
+import logging
+from typing import Any, Callable, Mapping
+
+from pyopenapi_gen.ir import IRSchema
+
+from ...context import ParsingContext
+from .helpers.direct_cycle import handle_direct_cycle
+from .helpers.existing_schema import handle_existing_schema
+from .helpers.missing_ref import handle_missing_ref
+from .helpers.new_schema import parse_new_schema
+
+logger = logging.getLogger(__name__)
+
+
+def resolve_schema_ref(
+    ref_value: str,
+    ref_name: str,
+    context: ParsingContext,
+    max_depth: int,
+    _parse_schema: Callable[[str | None, Mapping[str, Any] | None, ParsingContext, int], IRSchema],
+) -> IRSchema:
+    """
+    Resolves a schema reference in an OpenAPI specification.
+
+    Contracts:
+        Pre-conditions:
+            - ref_value must be a valid reference string (e.g., "#/components/schemas/MySchema")
+            - ref_name must be a valid schema name
+            - context must be a valid ParsingContext instance
+            - max_depth must be a non-negative integer
+            - _parse_schema must be a callable that parses schemas
+        Post-conditions:
+            - Returns a valid IRSchema instance
+            - The schema is registered in context.parsed_schemas
+            - Cyclic references are handled appropriately
+    """
+    # Extract the actual schema name from the reference
+    actual_schema_name = ref_value.split("/")[-1]
+
+    # Check for direct cycles or circular placeholders
+    if actual_schema_name in context.parsed_schemas:
+        existing_schema = context.parsed_schemas[actual_schema_name]
+        if getattr(existing_schema, "_is_circular_ref", False):
+            # logger.debug(f"Returning existing circular reference for '{actual_schema_name}'")
+            return existing_schema
+        # logger.debug(f"Direct cycle detected for '{actual_schema_name}', handling...")
+        return handle_direct_cycle(actual_schema_name, context)
+
+    # Check for existing fully parsed schema
+    if actual_schema_name in context.parsed_schemas:
+        # logger.debug(f"Returning existing fully parsed schema for '{actual_schema_name}'")
+        return handle_existing_schema(actual_schema_name, context)
+
+    # Get referenced node data
+    referenced_node_data = context.raw_spec_schemas.get(actual_schema_name)
+
+    # Handle missing references with stripped suffix fallback
+    if referenced_node_data is None:
+        # Try stripping common suffixes
+        base_name = actual_schema_name
+        for suffix in ["Response", "Request", "Input", "Output"]:
+            if base_name.endswith(suffix):
+                base_name = base_name[: -len(suffix)]
+                if base_name in context.raw_spec_schemas:
+                    # logger.debug(f"Found schema '{base_name}' after stripping suffix '{suffix}'")
+                    referenced_node_data = context.raw_spec_schemas[base_name]
+                    break
+
+    if referenced_node_data is None:
+        logger.warning(f"Missing reference '{ref_value}' for schema '{ref_name}'")
+        return handle_missing_ref(ref_value, ref_name, context, max_depth, _parse_schema)
+
+    # Standard parsing path for a new schema
+    # logger.debug(f"Parsing new schema '{actual_schema_name}'")
+    schema = parse_new_schema(actual_schema_name, dict(referenced_node_data), context, max_depth, _parse_schema)
+
+    # Store the schema under the requested reference name if different
+    # Don't mutate the original schema name to avoid affecting other references
+    if schema.name != ref_name:
+        context.parsed_schemas[ref_name] = schema
+
+    return schema
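A small sketch (editorial, not in the wheel) of the first early-return branch above: when a circular placeholder is already registered, it is returned untouched. The `_is_circular_ref` keyword is assumed to be an IRSchema constructor argument, as in cycle_helpers.py later in this diff.

from pyopenapi_gen.ir import IRSchema
from pyopenapi_gen.core.parsing.context import ParsingContext
from pyopenapi_gen.core.parsing.common.ref_resolution.resolve_schema_ref import resolve_schema_ref

def stub_parse(name, node, context, max_depth):
    # Stand-in for the real schema parser; never reached in this branch.
    return IRSchema(name=name, type="object")

ctx = ParsingContext()
ctx.parsed_schemas["User"] = IRSchema(name="User", type="object", _is_circular_ref=True)
result = resolve_schema_ref("#/components/schemas/User", "User", ctx, 10, stub_parse)
# result is the pre-registered circular placeholder; no re-parse is attempted.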
pyopenapi_gen/core/parsing/common/type_parser.py
@@ -0,0 +1,73 @@
+"""
+Dedicated parser for determining primary type and nullability from a schema's 'type' field.
+"""
+
+from __future__ import annotations
+
+from typing import (
+    Any,
+    List,
+    Tuple,
+)
+
+# Note: IRSchema is not needed here as this function doesn't construct it.
+
+
+def extract_primary_type_and_nullability(
+    type_node: Any, schema_name: str | None = None
+) -> Tuple[str | None, bool, List[str]]:
+    """Extract the primary type and nullability from a schema's 'type' field.
+
+    Contracts:
+        Pre-conditions:
+            - type_node is the value of the 'type' field from a schema
+        Post-conditions:
+            - Returns a tuple of (primary_type, is_nullable, warnings)
+            - primary_type is None if type_node is None or invalid
+            - is_nullable is True if type_node is 'null' or contains 'null'
+            - warnings contains any warnings about type handling
+    """
+    warnings: List[str] = []
+    is_nullable = False
+
+    if type_node is None:
+        return None, False, warnings
+
+    # Handle array of types
+    if isinstance(type_node, list):
+        if not type_node:
+            warnings.append(f"Empty type array in schema '{schema_name}'")
+            return None, False, warnings
+
+        # Check for nullability
+        if "null" in type_node:
+            is_nullable = True
+            type_node = [t for t in type_node if t != "null"]
+
+            if not type_node:
+                warnings.append(f"Only 'null' type in array for schema '{schema_name}'")
+                return None, True, warnings
+
+        # Use the first non-null type
+        primary_type = type_node[0]
+        if len(type_node) > 1:
+            warnings.append(f"Multiple types in array for schema '{schema_name}'. Using first type: {primary_type}")
+    else:
+        primary_type = type_node
+
+    # Validate the type
+    if not isinstance(primary_type, str):
+        warnings.append(f"Invalid type value '{primary_type}' in schema '{schema_name}'")
+        return None, is_nullable, warnings
+
+    # Normalize the type
+    primary_type = primary_type.lower()
+    if primary_type not in {"string", "number", "integer", "boolean", "object", "array", "null"}:
+        warnings.append(f"Unknown type '{primary_type}' in schema '{schema_name}'")
+        return None, is_nullable, warnings
+
+    # If the determined primary_type is "null", it means the actual type is None, but it IS nullable.
+    if primary_type == "null":
+        return None, True, warnings  # Ensure is_nullable is True
+
+    return primary_type, is_nullable, warnings
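The parser above is pure, so its behaviour can be summarised directly (editorial sketch, not part of the wheel):

from pyopenapi_gen.core.parsing.common.type_parser import extract_primary_type_and_nullability

assert extract_primary_type_and_nullability("string") == ("string", False, [])
assert extract_primary_type_and_nullability(["string", "null"], "User") == ("string", True, [])
assert extract_primary_type_and_nullability(None) == (None, False, [])
primary, nullable, warns = extract_primary_type_and_nullability(["integer", "string"], "Mixed")
# primary == "integer", nullable is False, and warns records that only the first type was used.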
pyopenapi_gen/core/parsing/context.py
@@ -0,0 +1,187 @@
+"""
+Defines the ParsingContext dataclass used to manage state during OpenAPI schema parsing.
+"""
+
+from __future__ import annotations
+
+import logging
+import os
+from dataclasses import dataclass, field
+from typing import TYPE_CHECKING, Any, List, Mapping, Set, Tuple
+
+if TYPE_CHECKING:
+    from pyopenapi_gen import IRSchema
+
+# from pyopenapi_gen.core.utils import NameSanitizer  # If needed later
+
+logger = logging.getLogger(__name__)
+
+
+@dataclass
+class ParsingContext:
+    """Manages shared state and context during the schema parsing process."""
+
+    raw_spec_schemas: dict[str, Mapping[str, Any]] = field(default_factory=dict)
+    raw_spec_components: Mapping[str, Any] = field(default_factory=dict)
+    parsed_schemas: dict[str, IRSchema] = field(default_factory=dict)
+    visited_refs: Set[str] = field(default_factory=set)
+    global_schema_names: Set[str] = field(default_factory=set)
+    package_root_name: str | None = None
+    # name_sanitizer: NameSanitizer = field(default_factory=NameSanitizer)  # Decided to instantiate where needed for now
+    collected_warnings: List[str] = field(default_factory=list)  # For collecting warnings from helpers
+
+    # Cycle detection
+    currently_parsing: List[str] = field(default_factory=list)
+    recursion_depth: int = 0
+    cycle_detected: bool = False
+
+    def __post_init__(self) -> None:
+        # Initialize logger for the context instance if needed, or rely on module logger
+        self.logger = logger  # or logging.getLogger(f"{__name__}.ParsingContext")
+
+        # Initialize unified cycle detection context
+        # Import here to avoid circular imports
+        from .unified_cycle_detection import UnifiedCycleContext
+
+        # Get max depth from environment or default
+        max_depth = int(os.environ.get("PYOPENAPI_MAX_DEPTH", 150))
+
+        self.unified_cycle_context = UnifiedCycleContext(
+            parsed_schemas=self.parsed_schemas,
+            max_depth=max_depth,  # Share the same parsed_schemas dict
+        )
+
+    def unified_enter_schema(self, schema_name: str | None) -> Any:
+        """Enter schema using unified cycle detection system."""
+        from .unified_cycle_detection import unified_enter_schema
+
+        result = unified_enter_schema(schema_name, self.unified_cycle_context)
+
+        # Update legacy fields for backward compatibility
+        self.recursion_depth = self.unified_cycle_context.recursion_depth
+        self.cycle_detected = self.unified_cycle_context.cycle_detected
+        self.currently_parsing = self.unified_cycle_context.schema_stack.copy()
+
+        return result
+
+    def unified_exit_schema(self, schema_name: str | None) -> None:
+        """Exit schema using unified cycle detection system."""
+        from .unified_cycle_detection import unified_exit_schema
+
+        unified_exit_schema(schema_name, self.unified_cycle_context)
+
+        # Update legacy fields for backward compatibility
+        self.recursion_depth = self.unified_cycle_context.recursion_depth
+        self.currently_parsing = self.unified_cycle_context.schema_stack.copy()
+
+    def clear_cycle_state(self) -> None:
+        """Clear both legacy and unified cycle detection state."""
+        # Clear legacy state
+        self.currently_parsing.clear()
+        self.recursion_depth = 0
+        self.cycle_detected = False
+
+        # Clear unified context state
+        self.unified_cycle_context.schema_stack.clear()
+        self.unified_cycle_context.schema_states.clear()
+        self.unified_cycle_context.recursion_depth = 0
+        self.unified_cycle_context.detected_cycles.clear()
+        self.unified_cycle_context.depth_exceeded_schemas.clear()
+        self.unified_cycle_context.cycle_detected = False
+
+    def enter_schema(self, schema_name: str | None) -> Tuple[bool, str | None]:
+        self.recursion_depth += 1
+
+        if schema_name is None:
+            return False, None
+
+        # Named cycle detection using ordered list currently_parsing
+        if schema_name in self.currently_parsing:
+            self.cycle_detected = True
+            try:
+                start_index = self.currently_parsing.index(schema_name)
+                # Path is from the first occurrence of schema_name to the current end of stack
+                cycle_path_list = self.currently_parsing[start_index:]
+            except ValueError:  # Should not happen
+                cycle_path_list = list(self.currently_parsing)  # Fallback
+
+            cycle_path_list.append(schema_name)  # Add the re-entrant schema_name to show the loop
+            cycle_path_str = " -> ".join(cycle_path_list)
+
+            return True, cycle_path_str
+
+        self.currently_parsing.append(schema_name)
+        return False, None
+
+    def exit_schema(self, schema_name: str | None) -> None:
+        if self.recursion_depth == 0:
+            self.logger.error("Cannot exit schema: recursion depth would go below zero.")
+            return
+
+        self.recursion_depth -= 1
+        if schema_name is not None:
+            if self.currently_parsing and self.currently_parsing[-1] == schema_name:
+                self.currently_parsing.pop()
+            elif (
+                schema_name in self.currently_parsing
+            ):  # Not last on stack but present: indicates mismatched enter/exit or error
+                self.logger.error(
+                    f"Exiting schema '{schema_name}' which is not at the top of the parsing stack. "
+                    f"Stack: {self.currently_parsing}. This indicates an issue."
+                )
+                # Attempt to remove it to prevent it being stuck, though this is a recovery attempt.
+                try:
+                    self.currently_parsing.remove(schema_name)
+                except ValueError:
+                    pass  # Should not happen if it was in the list.
+        # If schema_name is None, or (it's not None and not in currently_parsing), do nothing to currently_parsing.
+        # The latter case could be if exit_schema is called for a schema_name that wasn't pushed
+        # (e.g., after yielding a placeholder, where the original enter_schema
+        # didn't add it because it was already a cycle).
+
+    def reset_for_new_parse(self) -> None:
+        self.recursion_depth = 0
+        self.cycle_detected = False
+        self.currently_parsing.clear()
+        self.parsed_schemas.clear()
+
+    def get_current_path_for_logging(self) -> str:
+        """Helper to get a string representation of the current parsing path for logs."""
+        return " -> ".join(self.currently_parsing)
+
+    def get_parsed_schemas_for_emitter(self) -> dict[str, IRSchema]:
+        # ---- START RESTORE ----
+        return {
+            name: schema
+            for name, schema in self.parsed_schemas.items()
+            if not getattr(schema, "_is_circular_ref", False)
+            and not getattr(schema, "_from_unresolved_ref", False)
+            and not getattr(schema, "_max_depth_exceeded_marker", False)
+        }
+        # ---- END RESTORE ----
+
+    def is_schema_parsed(self, schema_name: str) -> bool:
+        """Check if a schema with the given name has been parsed.
+
+        Contracts:
+            Preconditions:
+                - schema_name is a valid string
+            Postconditions:
+                - Returns True if the schema exists in parsed_schemas, False otherwise
+        """
+        if not isinstance(schema_name, str):
+            raise TypeError("schema_name must be a string")
+        return schema_name in self.parsed_schemas
+
+    def get_parsed_schema(self, schema_name: str) -> "IRSchema" | None:
+        """Get a parsed schema by its name.
+
+        Contracts:
+            Preconditions:
+                - schema_name is a valid string
+            Postconditions:
+                - Returns the IRSchema if it exists, None otherwise
+        """
+        if not isinstance(schema_name, str):
+            raise TypeError("schema_name must be a string")
+        return self.parsed_schemas.get(schema_name)
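A short sketch (editorial, not in the wheel) of the legacy enter/exit bookkeeping above, assuming the installed package supplies UnifiedCycleContext for __post_init__:

from pyopenapi_gen.core.parsing.context import ParsingContext

ctx = ParsingContext()
assert ctx.enter_schema("Node") == (False, None)   # pushed onto currently_parsing
assert ctx.enter_schema("Child") == (False, None)
is_cycle, path = ctx.enter_schema("Node")          # re-entering an in-progress schema
assert is_cycle and path == "Node -> Child -> Node"
ctx.exit_schema("Child")
ctx.exit_schema("Node")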
pyopenapi_gen/core/parsing/cycle_helpers.py
@@ -0,0 +1,126 @@
+import logging
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    pass
+
+from pyopenapi_gen import IRSchema
+from pyopenapi_gen.core.utils import NameSanitizer
+
+from .context import ParsingContext
+
+# Define module-level logger
+logger = logging.getLogger(__name__)
+
+
+def _handle_cycle_detection(
+    original_name: str, cycle_path: str, context: ParsingContext, allow_self_reference: bool
+) -> IRSchema:
+    """Handle case where a cycle is detected in schema references.
+
+    Contracts:
+        Pre-conditions:
+            - original_name is not None
+            - context is a valid ParsingContext instance
+            - allow_self_reference indicates if direct self-references are permitted without being treated as errors.
+        Post-conditions:
+            - Returns an IRSchema instance.
+            - If not a permitted self-reference, it's marked as circular and registered.
+            - If a permitted self-reference, a placeholder is returned and not marked as an error cycle.
+    """
+    schema_ir_name_attr = NameSanitizer.sanitize_class_name(original_name)
+
+    # Check for direct self-reference when allowed
+    path_parts = cycle_path.split(" -> ")
+    is_direct_self_ref = len(path_parts) == 2 and path_parts[0] == original_name and path_parts[1] == original_name
+
+    if allow_self_reference and is_direct_self_ref:
+        # Permitted direct self-reference, creating placeholder without marking as error cycle
+        if original_name not in context.parsed_schemas:
+            # Create a basic placeholder. It will be fully populated when its real definition is parsed.
+            # Key is NOT to mark _is_circular_ref = True here.
+            schema = IRSchema(
+                name=schema_ir_name_attr,
+                type="object",  # Default type, might be refined if we parse its own definition later
+                description=f"[Self-referential placeholder for {original_name}]",
+                _from_unresolved_ref=False,  # Not unresolved in the error sense
+                _is_self_referential_stub=True,  # New flag to indicate this state
+            )
+            context.parsed_schemas[original_name] = schema
+            return schema
+        else:
+            # If it's already in parsed_schemas, it means we're re-entering it.
+            # This could happen if it was created as a placeholder by another ref first.
+            # Ensure it's marked as a self-referential stub if not already.
+            existing_schema = context.parsed_schemas[original_name]
+            if not getattr(existing_schema, "_is_self_referential_stub", False):
+                existing_schema._is_self_referential_stub = True  # Mark it
+            return existing_schema
+
+    # If not a permitted direct self-reference, or if self-references are not allowed, proceed with error cycle handling
+    if original_name not in context.parsed_schemas:
+        schema = IRSchema(
+            name=schema_ir_name_attr,
+            type="object",
+            description=f"[Circular reference detected: {cycle_path}]",
+            _from_unresolved_ref=True,
+            _circular_ref_path=cycle_path,
+            _is_circular_ref=True,
+        )
+        context.parsed_schemas[original_name] = schema
+    else:
+        schema = context.parsed_schemas[original_name]
+        schema._is_circular_ref = True
+        schema._from_unresolved_ref = True
+        schema._circular_ref_path = cycle_path
+        if schema.name != schema_ir_name_attr:
+            schema.name = schema_ir_name_attr
+
+    context.cycle_detected = True
+    return schema
+
+
+def _handle_max_depth_exceeded(original_name: str | None, context: ParsingContext, max_depth: int) -> IRSchema:
+    """Handle case where maximum recursion depth is exceeded.
+
+    Contracts:
+        Pre-conditions:
+            - context is a valid ParsingContext instance
+            - max_depth >= 0
+        Post-conditions:
+            - Returns an IRSchema instance marked with _max_depth_exceeded_marker=True
+            - If original_name is provided, the schema is registered in context.parsed_schemas
+    """
+    schema_ir_name_attr = NameSanitizer.sanitize_class_name(original_name) if original_name else None
+
+    # path_prefix = schema_ir_name_attr if schema_ir_name_attr else "<anonymous_schema>"
+    # cycle_path_for_desc = f"{path_prefix} -> MAX_DEPTH_EXCEEDED"
+    description = f"[Maximum recursion depth ({max_depth}) exceeded for '{original_name or 'anonymous'}']"
+    logger.warning(description)
+
+    placeholder_schema = IRSchema(
+        name=schema_ir_name_attr,
+        type="object",  # Default type for a placeholder created due to depth
+        description=description,
+        _max_depth_exceeded_marker=True,
+        # Do NOT set _is_circular_ref or _from_unresolved_ref here just for depth limit
+    )
+
+    if original_name is not None:
+        if original_name not in context.parsed_schemas:
+            context.parsed_schemas[original_name] = placeholder_schema
+        else:
+            # If a schema with this name already exists (e.g. a forward ref stub),
+            # update it to mark that max depth was hit during its resolution attempt.
+            # This is tricky because we don't want to overwrite a fully parsed schema.
+            # For now, let's assume if we are here, the existing one is also some form of placeholder
+            # or its parsing was interrupted to get here.
+            existing_schema = context.parsed_schemas[original_name]
+            existing_schema.description = description  # Update description
+            existing_schema._max_depth_exceeded_marker = True
+            # Avoid re-assigning to placeholder_schema directly to keep existing IR object if it was complex
+            # and just needs this flag + description update.
+            return existing_schema  # Return the (now updated) existing schema
+
+    # context.cycle_detected = True  # Max depth is not strictly a cycle in the schema definition itself
+    return placeholder_schema
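A sketch (editorial, not in the wheel) of the error-cycle branch of _handle_cycle_detection for an A -> B -> A loop:

from pyopenapi_gen.core.parsing.context import ParsingContext
from pyopenapi_gen.core.parsing.cycle_helpers import _handle_cycle_detection

ctx = ParsingContext()
placeholder = _handle_cycle_detection("A", "A -> B -> A", ctx, allow_self_reference=False)
# placeholder._is_circular_ref is True, its description embeds the cycle path,
# ctx.parsed_schemas["A"] holds the placeholder, and ctx.cycle_detected is True.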
pyopenapi_gen/core/parsing/keywords/__init__.py
@@ -0,0 +1 @@
+# keyword-specific parsers
pyopenapi_gen/core/parsing/keywords/all_of_parser.py
@@ -0,0 +1,81 @@
+"""
+Handles the 'allOf' keyword in an OpenAPI schema, merging properties and required fields.
+Renamed from all_of_merger to all_of_parser for consistency.
+"""
+
+from __future__ import annotations
+
+import os
+from typing import TYPE_CHECKING, Any, Callable, List, Mapping, Set, Tuple
+
+from pyopenapi_gen import IRSchema
+
+from ..context import ParsingContext
+
+ENV_MAX_DEPTH = int(os.environ.get("PYOPENAPI_MAX_DEPTH", "150"))
+
+if TYPE_CHECKING:
+    pass
+
+
+def _process_all_of(
+    node: Mapping[str, Any],
+    current_schema_name: str | None,
+    context: ParsingContext,
+    _parse_schema_func: Callable[[str | None, Mapping[str, Any] | None, ParsingContext, int | None], IRSchema],
+    max_depth: int = ENV_MAX_DEPTH,
+) -> Tuple[dict[str, IRSchema], Set[str], List[IRSchema]]:
+    """Processes the 'allOf' keyword in a schema node.
+
+    Merges properties and required fields from all sub-schemas listed in 'allOf'
+    and also from any direct 'properties' defined at the same level as 'allOf'.
+
+    Contracts:
+        Pre-conditions:
+            - node is a non-empty mapping representing an OpenAPI schema node.
+            - context is a valid ParsingContext instance.
+            - _parse_schema_func is a callable function.
+            - max_depth is a non-negative integer.
+        Post-conditions:
+            - Returns a tuple containing:
+                - merged_properties: Dict of property names to IRSchema.
+                - merged_required: Set of required property names.
+                - parsed_all_of_components: List of IRSchema for each item in 'allOf' (empty if 'allOf' not present).
+    """
+    # Pre-conditions
+    if not (isinstance(node, Mapping) and node):
+        raise TypeError("node must be a non-empty Mapping")
+    if not isinstance(context, ParsingContext):
+        raise TypeError("context must be a ParsingContext instance")
+    if not callable(_parse_schema_func):
+        raise TypeError("_parse_schema_func must be callable")
+    if not (isinstance(max_depth, int) and max_depth >= 0):
+        raise ValueError("max_depth must be a non-negative integer")
+
+    parsed_all_of_components: List[IRSchema] = []
+    merged_required: Set[str] = set(node.get("required", []))
+    merged_properties: dict[str, IRSchema] = {}
+
+    if "allOf" not in node:
+        current_node_direct_properties = node.get("properties", {})
+        for prop_name, prop_data in current_node_direct_properties.items():
+            prop_schema_name_context = f"{current_schema_name}.{prop_name}" if current_schema_name else prop_name
+            merged_properties[prop_name] = _parse_schema_func(prop_schema_name_context, prop_data, context, max_depth)
+        return merged_properties, merged_required, parsed_all_of_components
+
+    for sub_node in node["allOf"]:
+        sub_schema_ir = _parse_schema_func(None, sub_node, context, max_depth)
+        parsed_all_of_components.append(sub_schema_ir)
+        if sub_schema_ir.properties:
+            for prop_name, prop_schema_val in sub_schema_ir.properties.items():
+                if prop_name not in merged_properties:
+                    merged_properties[prop_name] = prop_schema_val
+        if sub_schema_ir.required:
+            merged_required.update(sub_schema_ir.required)
+
+    current_node_direct_properties = node.get("properties", {})
+    for prop_name, prop_data in current_node_direct_properties.items():
+        prop_schema_name_context = f"{current_schema_name}.{prop_name}" if current_schema_name else prop_name
+        merged_properties[prop_name] = _parse_schema_func(prop_schema_name_context, prop_data, context, max_depth)
+
+    return merged_properties, merged_required, parsed_all_of_components
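Finally, a sketch (editorial, not in the wheel) of the allOf merge above, with a stub in place of the real recursive parser and assuming IRSchema leaves properties/required empty by default:

from pyopenapi_gen import IRSchema
from pyopenapi_gen.core.parsing.context import ParsingContext
from pyopenapi_gen.core.parsing.keywords.all_of_parser import _process_all_of

def stub_parse(name, node, context, max_depth):
    # Stand-in for the real recursive schema parser.
    return IRSchema(name=name, type=(node or {}).get("type", "object"))

node = {
    "allOf": [{"$ref": "#/components/schemas/Base"}],
    "properties": {"extra": {"type": "string"}},
    "required": ["extra"],
}
props, required, components = _process_all_of(node, "Extended", ParsingContext(), stub_parse)
# props["extra"] is the stub-parsed property, required == {"extra"},
# and components holds one stub IRSchema for the single allOf entry.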