pyopenapi-gen 0.8.3 (pyopenapi_gen-0.8.3-py3-none-any.whl)

This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
Files changed (122)
  1. pyopenapi_gen/__init__.py +114 -0
  2. pyopenapi_gen/__main__.py +6 -0
  3. pyopenapi_gen/cli.py +86 -0
  4. pyopenapi_gen/context/file_manager.py +52 -0
  5. pyopenapi_gen/context/import_collector.py +382 -0
  6. pyopenapi_gen/context/render_context.py +630 -0
  7. pyopenapi_gen/core/__init__.py +0 -0
  8. pyopenapi_gen/core/auth/base.py +22 -0
  9. pyopenapi_gen/core/auth/plugins.py +89 -0
  10. pyopenapi_gen/core/exceptions.py +25 -0
  11. pyopenapi_gen/core/http_transport.py +219 -0
  12. pyopenapi_gen/core/loader/__init__.py +12 -0
  13. pyopenapi_gen/core/loader/loader.py +158 -0
  14. pyopenapi_gen/core/loader/operations/__init__.py +12 -0
  15. pyopenapi_gen/core/loader/operations/parser.py +155 -0
  16. pyopenapi_gen/core/loader/operations/post_processor.py +60 -0
  17. pyopenapi_gen/core/loader/operations/request_body.py +85 -0
  18. pyopenapi_gen/core/loader/parameters/__init__.py +10 -0
  19. pyopenapi_gen/core/loader/parameters/parser.py +121 -0
  20. pyopenapi_gen/core/loader/responses/__init__.py +10 -0
  21. pyopenapi_gen/core/loader/responses/parser.py +104 -0
  22. pyopenapi_gen/core/loader/schemas/__init__.py +11 -0
  23. pyopenapi_gen/core/loader/schemas/extractor.py +184 -0
  24. pyopenapi_gen/core/pagination.py +64 -0
  25. pyopenapi_gen/core/parsing/__init__.py +13 -0
  26. pyopenapi_gen/core/parsing/common/__init__.py +1 -0
  27. pyopenapi_gen/core/parsing/common/ref_resolution/__init__.py +9 -0
  28. pyopenapi_gen/core/parsing/common/ref_resolution/helpers/__init__.py +0 -0
  29. pyopenapi_gen/core/parsing/common/ref_resolution/helpers/cyclic_properties.py +66 -0
  30. pyopenapi_gen/core/parsing/common/ref_resolution/helpers/direct_cycle.py +33 -0
  31. pyopenapi_gen/core/parsing/common/ref_resolution/helpers/existing_schema.py +22 -0
  32. pyopenapi_gen/core/parsing/common/ref_resolution/helpers/list_response.py +54 -0
  33. pyopenapi_gen/core/parsing/common/ref_resolution/helpers/missing_ref.py +52 -0
  34. pyopenapi_gen/core/parsing/common/ref_resolution/helpers/new_schema.py +50 -0
  35. pyopenapi_gen/core/parsing/common/ref_resolution/helpers/stripped_suffix.py +51 -0
  36. pyopenapi_gen/core/parsing/common/ref_resolution/resolve_schema_ref.py +86 -0
  37. pyopenapi_gen/core/parsing/common/type_parser.py +74 -0
  38. pyopenapi_gen/core/parsing/context.py +184 -0
  39. pyopenapi_gen/core/parsing/cycle_helpers.py +123 -0
  40. pyopenapi_gen/core/parsing/keywords/__init__.py +1 -0
  41. pyopenapi_gen/core/parsing/keywords/all_of_parser.py +77 -0
  42. pyopenapi_gen/core/parsing/keywords/any_of_parser.py +79 -0
  43. pyopenapi_gen/core/parsing/keywords/array_items_parser.py +69 -0
  44. pyopenapi_gen/core/parsing/keywords/one_of_parser.py +72 -0
  45. pyopenapi_gen/core/parsing/keywords/properties_parser.py +98 -0
  46. pyopenapi_gen/core/parsing/schema_finalizer.py +166 -0
  47. pyopenapi_gen/core/parsing/schema_parser.py +610 -0
  48. pyopenapi_gen/core/parsing/transformers/__init__.py +0 -0
  49. pyopenapi_gen/core/parsing/transformers/inline_enum_extractor.py +285 -0
  50. pyopenapi_gen/core/parsing/transformers/inline_object_promoter.py +117 -0
  51. pyopenapi_gen/core/parsing/unified_cycle_detection.py +293 -0
  52. pyopenapi_gen/core/postprocess_manager.py +161 -0
  53. pyopenapi_gen/core/schemas.py +40 -0
  54. pyopenapi_gen/core/streaming_helpers.py +86 -0
  55. pyopenapi_gen/core/telemetry.py +67 -0
  56. pyopenapi_gen/core/utils.py +409 -0
  57. pyopenapi_gen/core/warning_collector.py +83 -0
  58. pyopenapi_gen/core/writers/code_writer.py +135 -0
  59. pyopenapi_gen/core/writers/documentation_writer.py +222 -0
  60. pyopenapi_gen/core/writers/line_writer.py +217 -0
  61. pyopenapi_gen/core/writers/python_construct_renderer.py +274 -0
  62. pyopenapi_gen/core_package_template/README.md +21 -0
  63. pyopenapi_gen/emit/models_emitter.py +143 -0
  64. pyopenapi_gen/emitters/client_emitter.py +51 -0
  65. pyopenapi_gen/emitters/core_emitter.py +181 -0
  66. pyopenapi_gen/emitters/docs_emitter.py +44 -0
  67. pyopenapi_gen/emitters/endpoints_emitter.py +223 -0
  68. pyopenapi_gen/emitters/exceptions_emitter.py +52 -0
  69. pyopenapi_gen/emitters/models_emitter.py +428 -0
  70. pyopenapi_gen/generator/client_generator.py +562 -0
  71. pyopenapi_gen/helpers/__init__.py +1 -0
  72. pyopenapi_gen/helpers/endpoint_utils.py +552 -0
  73. pyopenapi_gen/helpers/type_cleaner.py +341 -0
  74. pyopenapi_gen/helpers/type_helper.py +112 -0
  75. pyopenapi_gen/helpers/type_resolution/__init__.py +1 -0
  76. pyopenapi_gen/helpers/type_resolution/array_resolver.py +57 -0
  77. pyopenapi_gen/helpers/type_resolution/composition_resolver.py +79 -0
  78. pyopenapi_gen/helpers/type_resolution/finalizer.py +89 -0
  79. pyopenapi_gen/helpers/type_resolution/named_resolver.py +174 -0
  80. pyopenapi_gen/helpers/type_resolution/object_resolver.py +212 -0
  81. pyopenapi_gen/helpers/type_resolution/primitive_resolver.py +57 -0
  82. pyopenapi_gen/helpers/type_resolution/resolver.py +48 -0
  83. pyopenapi_gen/helpers/url_utils.py +14 -0
  84. pyopenapi_gen/http_types.py +20 -0
  85. pyopenapi_gen/ir.py +167 -0
  86. pyopenapi_gen/py.typed +1 -0
  87. pyopenapi_gen/types/__init__.py +11 -0
  88. pyopenapi_gen/types/contracts/__init__.py +13 -0
  89. pyopenapi_gen/types/contracts/protocols.py +106 -0
  90. pyopenapi_gen/types/contracts/types.py +30 -0
  91. pyopenapi_gen/types/resolvers/__init__.py +7 -0
  92. pyopenapi_gen/types/resolvers/reference_resolver.py +71 -0
  93. pyopenapi_gen/types/resolvers/response_resolver.py +203 -0
  94. pyopenapi_gen/types/resolvers/schema_resolver.py +367 -0
  95. pyopenapi_gen/types/services/__init__.py +5 -0
  96. pyopenapi_gen/types/services/type_service.py +133 -0
  97. pyopenapi_gen/visit/client_visitor.py +228 -0
  98. pyopenapi_gen/visit/docs_visitor.py +38 -0
  99. pyopenapi_gen/visit/endpoint/__init__.py +1 -0
  100. pyopenapi_gen/visit/endpoint/endpoint_visitor.py +103 -0
  101. pyopenapi_gen/visit/endpoint/generators/__init__.py +1 -0
  102. pyopenapi_gen/visit/endpoint/generators/docstring_generator.py +121 -0
  103. pyopenapi_gen/visit/endpoint/generators/endpoint_method_generator.py +87 -0
  104. pyopenapi_gen/visit/endpoint/generators/request_generator.py +103 -0
  105. pyopenapi_gen/visit/endpoint/generators/response_handler_generator.py +497 -0
  106. pyopenapi_gen/visit/endpoint/generators/signature_generator.py +88 -0
  107. pyopenapi_gen/visit/endpoint/generators/url_args_generator.py +183 -0
  108. pyopenapi_gen/visit/endpoint/processors/__init__.py +1 -0
  109. pyopenapi_gen/visit/endpoint/processors/import_analyzer.py +76 -0
  110. pyopenapi_gen/visit/endpoint/processors/parameter_processor.py +171 -0
  111. pyopenapi_gen/visit/exception_visitor.py +52 -0
  112. pyopenapi_gen/visit/model/__init__.py +0 -0
  113. pyopenapi_gen/visit/model/alias_generator.py +89 -0
  114. pyopenapi_gen/visit/model/dataclass_generator.py +197 -0
  115. pyopenapi_gen/visit/model/enum_generator.py +200 -0
  116. pyopenapi_gen/visit/model/model_visitor.py +197 -0
  117. pyopenapi_gen/visit/visitor.py +97 -0
  118. pyopenapi_gen-0.8.3.dist-info/METADATA +224 -0
  119. pyopenapi_gen-0.8.3.dist-info/RECORD +122 -0
  120. pyopenapi_gen-0.8.3.dist-info/WHEEL +4 -0
  121. pyopenapi_gen-0.8.3.dist-info/entry_points.txt +2 -0
  122. pyopenapi_gen-0.8.3.dist-info/licenses/LICENSE +21 -0
pyopenapi_gen/core/loader/operations/post_processor.py
@@ -0,0 +1,60 @@
+ """Operation post-processing utilities.
+
+ Provides functions to finalize and enhance parsed operations.
+ """
+
+ from __future__ import annotations
+
+ import logging
+
+ from pyopenapi_gen import IROperation
+ from pyopenapi_gen.core.parsing.context import ParsingContext
+ from pyopenapi_gen.core.utils import NameSanitizer
+
+ logger = logging.getLogger(__name__)
+
+
+ def post_process_operation(op: IROperation, context: ParsingContext) -> None:
+     """Post-process an operation to finalize schema names and register them.
+
+     Contracts:
+         Preconditions:
+             - op is a valid IROperation
+             - context is properly initialized
+         Postconditions:
+             - All request body and response schemas are properly named and registered
+     """
+     assert isinstance(op, IROperation), "op must be an IROperation"
+     assert isinstance(context, ParsingContext), "context must be a ParsingContext"
+
+     # Handle request body schemas
+     if op.request_body:
+         for _, sch_val in op.request_body.content.items():
+             if not sch_val.name:
+                 generated_rb_name = NameSanitizer.sanitize_class_name(op.operation_id + "Request")
+                 sch_val.name = generated_rb_name
+                 context.parsed_schemas[generated_rb_name] = sch_val
+             elif sch_val.name not in context.parsed_schemas:
+                 context.parsed_schemas[sch_val.name] = sch_val
+
+     # Handle response schemas
+     for resp_val in op.responses:
+         for _, sch_resp_val in resp_val.content.items():
+             if sch_resp_val.name is None:
+                 if getattr(sch_resp_val, "_from_unresolved_ref", False):
+                     continue
+                 is_streaming = getattr(resp_val, "stream", False)
+                 if is_streaming:
+                     continue
+
+                 should_synthesize_name = False
+                 if sch_resp_val.type == "object" and (sch_resp_val.properties or sch_resp_val.additional_properties):
+                     should_synthesize_name = True
+
+                 if should_synthesize_name:
+                     generated_name = NameSanitizer.sanitize_class_name(op.operation_id + "Response")
+                     sch_resp_val.name = generated_name
+                     context.parsed_schemas[generated_name] = sch_resp_val
+
+             elif sch_resp_val.name and sch_resp_val.name not in context.parsed_schemas:
+                 context.parsed_schemas[sch_resp_val.name] = sch_resp_val
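The rule above only synthesizes a name for an unnamed, non-streaming object response schema that actually carries properties. A standalone sketch of that decision, using a simplified stand-in for IRSchema and plain title-casing where the real code uses NameSanitizer:

```python
from dataclasses import dataclass, field
from typing import Any, Dict, Optional


@dataclass
class FakeSchema:
    # Simplified stand-in for IRSchema; only the fields the naming rule inspects.
    name: Optional[str] = None
    type: Optional[str] = None
    properties: Dict[str, Any] = field(default_factory=dict)
    additional_properties: bool = False


def synthesize_response_name(operation_id: str, schema: FakeSchema, streaming: bool) -> Optional[str]:
    """Mirror the decision in post_process_operation above."""
    if schema.name is not None or streaming:
        return None
    if schema.type == "object" and (schema.properties or schema.additional_properties):
        # The real code uses NameSanitizer.sanitize_class_name(op.operation_id + "Response").
        return operation_id.title().replace("_", "") + "Response"
    return None


print(synthesize_response_name("list_users", FakeSchema(type="object", properties={"id": {}}), streaming=False))
# ListUsersResponse
print(synthesize_response_name("list_users", FakeSchema(type="string"), streaming=False))
# None: primitive response bodies are left unnamed
```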
pyopenapi_gen/core/loader/operations/request_body.py
@@ -0,0 +1,85 @@
+ """Request body parsers for OpenAPI IR transformation.
+
+ Provides functions to parse and transform OpenAPI request bodies into IR format.
+ """
+
+ from __future__ import annotations
+
+ import logging
+ from typing import Any, Dict, Mapping, Optional
+
+ from pyopenapi_gen import IRRequestBody, IRSchema
+ from pyopenapi_gen.core.parsing.context import ParsingContext
+ from pyopenapi_gen.core.parsing.schema_parser import _parse_schema
+
+ logger = logging.getLogger(__name__)
+
+
+ def parse_request_body(
+     rb_node: Mapping[str, Any],
+     raw_request_bodies: Mapping[str, Any],
+     context: ParsingContext,
+     operation_id: str,
+ ) -> Optional[IRRequestBody]:
+     """Parse a request body node into an IRRequestBody.
+
+     Contracts:
+         Preconditions:
+             - rb_node is a valid request body node
+             - raw_request_bodies contains component request bodies
+             - context is properly initialized
+             - operation_id is provided for naming
+         Postconditions:
+             - Returns a properly populated IRRequestBody or None if invalid
+             - All content media types are properly mapped to schemas
+     """
+     assert isinstance(rb_node, Mapping), "rb_node must be a Mapping"
+     assert isinstance(raw_request_bodies, Mapping), "raw_request_bodies must be a Mapping"
+     assert isinstance(context, ParsingContext), "context must be a ParsingContext"
+     assert operation_id, "operation_id must be provided"
+
+     # Handle $ref in request body
+     if (
+         "$ref" in rb_node
+         and isinstance(rb_node.get("$ref"), str)
+         and rb_node["$ref"].startswith("#/components/requestBodies/")
+     ):
+         ref_name = rb_node["$ref"].split("/")[-1]
+         resolved_rb_node = raw_request_bodies.get(ref_name, {}) or rb_node
+     else:
+         resolved_rb_node = rb_node
+
+     required_flag = bool(resolved_rb_node.get("required", False))
+     desc = resolved_rb_node.get("description")
+     content_map: Dict[str, IRSchema] = {}
+
+     parent_promo_name_for_req_body = f"{operation_id}RequestBody"
+
+     for mt, media in resolved_rb_node.get("content", {}).items():
+         media_schema_node = media.get("schema")
+         if (
+             isinstance(media_schema_node, Mapping)
+             and "$ref" not in media_schema_node
+             and (
+                 media_schema_node.get("type") == "object"
+                 or "properties" in media_schema_node
+                 or "allOf" in media_schema_node
+                 or "anyOf" in media_schema_node
+                 or "oneOf" in media_schema_node
+             )
+         ):
+             content_map[mt] = _parse_schema(
+                 parent_promo_name_for_req_body, media_schema_node, context, allow_self_reference=False
+             )
+         else:
+             content_map[mt] = _parse_schema(None, media_schema_node, context, allow_self_reference=False)
+
+     if not content_map:
+         return None
+
+     request_body = IRRequestBody(required=required_flag, content=content_map, description=desc)
+
+     # Post-condition check
+     assert request_body.content == content_map, "Request body content mismatch"
+
+     return request_body
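For reference, the two input shapes `parse_request_body` handles: an inline request-body node, or a `$ref` into `components/requestBodies`, which is resolved by its last path segment with a fallback to the original node. A self-contained sketch of that resolution step with hypothetical spec fragments:

```python
# Hypothetical component map and a $ref node, mirroring the resolution above.
raw_request_bodies = {
    "CreatePet": {
        "required": True,
        "content": {
            "application/json": {
                "schema": {"type": "object", "properties": {"name": {"type": "string"}}}
            }
        },
    }
}

rb_node = {"$ref": "#/components/requestBodies/CreatePet"}

if "$ref" in rb_node and rb_node["$ref"].startswith("#/components/requestBodies/"):
    ref_name = rb_node["$ref"].split("/")[-1]
    # Fall back to the original node if the component lookup fails.
    resolved = raw_request_bodies.get(ref_name, {}) or rb_node
else:
    resolved = rb_node

print(resolved["required"])       # True
print(list(resolved["content"]))  # ['application/json']
```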
pyopenapi_gen/core/loader/parameters/__init__.py
@@ -0,0 +1,10 @@
+ """Parameter parsing utilities.
+
+ Functions to extract and transform parameters from raw OpenAPI specifications.
+ """
+
+ from __future__ import annotations
+
+ from .parser import parse_parameter, resolve_parameter_node_if_ref
+
+ __all__ = ["parse_parameter", "resolve_parameter_node_if_ref"]
pyopenapi_gen/core/loader/parameters/parser.py
@@ -0,0 +1,121 @@
+ """Parameter parsers for OpenAPI IR transformation.
+
+ Provides functions to parse and transform OpenAPI parameters into IR format.
+ """
+
+ from __future__ import annotations
+
+ import logging
+ from typing import Any, Mapping, Optional, cast
+
+ from pyopenapi_gen import IRParameter, IRSchema
+ from pyopenapi_gen.core.parsing.context import ParsingContext
+ from pyopenapi_gen.core.parsing.schema_parser import _parse_schema
+ from pyopenapi_gen.core.utils import NameSanitizer
+
+ logger = logging.getLogger(__name__)
+
+
+ def resolve_parameter_node_if_ref(param_node_data: Mapping[str, Any], context: ParsingContext) -> Mapping[str, Any]:
+     """Resolve a parameter node if it's a reference.
+
+     Contracts:
+         Preconditions:
+             - param_node_data is a valid parameter node mapping
+             - context contains the required components information
+         Postconditions:
+             - Returns the resolved parameter node or the original if not a ref
+             - If a reference, the parameter is looked up in components
+     """
+     assert isinstance(param_node_data, Mapping), "param_node_data must be a Mapping"
+     assert isinstance(context, ParsingContext), "context must be a ParsingContext"
+
+     if "$ref" in param_node_data and isinstance(param_node_data.get("$ref"), str):
+         ref_path = param_node_data["$ref"]
+         if ref_path.startswith("#/components/parameters/"):
+             param_name = ref_path.split("/")[-1]
+             # Access raw_spec_components from the context
+             resolved_node = context.raw_spec_components.get("parameters", {}).get(param_name)
+             if resolved_node:
+                 logger.debug(f"Resolved parameter $ref '{ref_path}' to '{param_name}'")
+                 return cast(Mapping[str, Any], resolved_node)
+             else:
+                 logger.warning(f"Could not resolve parameter $ref '{ref_path}'")
+                 return param_node_data  # Return original ref node if resolution fails
+
+     return param_node_data  # Not a ref or not a component parameter ref
+
+
+ def parse_parameter(
+     node: Mapping[str, Any],
+     context: ParsingContext,
+     operation_id_for_promo: Optional[str] = None,
+ ) -> IRParameter:
+     """Convert an OpenAPI parameter node into IRParameter.
+
+     Contracts:
+         Preconditions:
+             - node is a valid parameter node with required fields
+             - context is properly initialized
+             - If node has a schema, it is a valid schema definition
+         Postconditions:
+             - Returns a properly populated IRParameter
+             - Complex parameter schemas are given appropriate names
+     """
+     assert isinstance(node, Mapping), "node must be a Mapping"
+     assert "name" in node, "Parameter node must have a name"
+     assert isinstance(context, ParsingContext), "context must be a ParsingContext"
+
+     sch = node.get("schema")
+     param_name = node["name"]
+
+     name_for_inline_param_schema: Optional[str] = None
+     if (
+         sch
+         and isinstance(sch, Mapping)
+         and "$ref" not in sch
+         and (sch.get("type") == "object" or "properties" in sch or "allOf" in sch or "anyOf" in sch or "oneOf" in sch)
+     ):
+         base_param_promo_name = f"{operation_id_for_promo}Param" if operation_id_for_promo else ""
+         name_for_inline_param_schema = f"{base_param_promo_name}{NameSanitizer.sanitize_class_name(param_name)}"
+
+     # For parameters, we want to avoid creating complex schemas for simple enum arrays
+     # Check if this is a simple enum array and handle it specially
+     if (
+         sch
+         and isinstance(sch, Mapping)
+         and sch.get("type") == "array"
+         and "items" in sch
+         and isinstance(sch["items"], Mapping)
+         and sch["items"].get("type") == "string"
+         and "enum" in sch["items"]
+         and "$ref" not in sch["items"]
+     ):
+         # This is an array of string enums - for parameters, we can treat this as List[str]
+         # rather than creating complex named schemas
+         schema_ir = IRSchema(
+             name=None,
+             type="array",
+             items=IRSchema(name=None, type="string", enum=sch["items"]["enum"]),
+             description=sch.get("description"),
+         )
+     else:
+         schema_ir = (
+             _parse_schema(name_for_inline_param_schema, sch, context, allow_self_reference=False)
+             if sch
+             else IRSchema(name=None)
+         )
+
+     param = IRParameter(
+         name=node["name"],
+         param_in=node.get("in", "query"),
+         required=bool(node.get("required", False)),
+         schema=schema_ir,
+         description=node.get("description"),
+     )
+
+     # Post-condition check
+     assert param.name == node["name"], "Parameter name mismatch"
+     assert param.schema is not None, "Parameter schema must be created"
+
+     return param
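The special case above keeps a parameter whose schema is an array of string enums as a plain array-of-strings schema instead of promoting a named enum schema. A hypothetical parameter node that triggers that branch, with the same guard condition spelled out standalone:

```python
# Hypothetical query parameter carrying an inline string-enum array.
param_node = {
    "name": "status",
    "in": "query",
    "schema": {
        "type": "array",
        "items": {"type": "string", "enum": ["active", "archived"]},
    },
}

sch = param_node["schema"]
is_simple_enum_array = (
    sch.get("type") == "array"
    and isinstance(sch.get("items"), dict)
    and sch["items"].get("type") == "string"
    and "enum" in sch["items"]
    and "$ref" not in sch["items"]
)
print(is_simple_enum_array)  # True: parse_parameter keeps this as a plain list-of-strings schema
```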
pyopenapi_gen/core/loader/responses/__init__.py
@@ -0,0 +1,10 @@
+ """Response parsing utilities.
+
+ Functions to extract and transform responses from raw OpenAPI specifications.
+ """
+
+ from __future__ import annotations
+
+ from .parser import parse_response
+
+ __all__ = ["parse_response"]
pyopenapi_gen/core/loader/responses/parser.py
@@ -0,0 +1,104 @@
+ """Response parsers for OpenAPI IR transformation.
+
+ Provides functions to parse and transform OpenAPI responses into IR format.
+ """
+
+ from __future__ import annotations
+
+ import logging
+ from typing import Any, Dict, Mapping
+
+ from pyopenapi_gen import IRResponse, IRSchema
+ from pyopenapi_gen.core.parsing.context import ParsingContext
+ from pyopenapi_gen.core.parsing.schema_parser import _parse_schema
+
+ logger = logging.getLogger(__name__)
+
+
+ def parse_response(
+     code: str,
+     node: Mapping[str, Any],
+     context: ParsingContext,
+     operation_id_for_promo: str,
+ ) -> IRResponse:
+     """Convert an OpenAPI response node into IRResponse.
+
+     Contracts:
+         Preconditions:
+             - code is a valid HTTP status code as string
+             - node is a valid response node
+             - context is properly initialized
+             - operation_id_for_promo is provided for naming inline schemas
+         Postconditions:
+             - Returns a properly populated IRResponse
+             - All content media types are properly mapped to schemas
+             - Stream flags are correctly set based on media types
+     """
+     assert isinstance(code, str), "code must be a string"
+     assert isinstance(node, Mapping), "node must be a Mapping"
+     assert isinstance(context, ParsingContext), "context must be a ParsingContext"
+     assert operation_id_for_promo, "operation_id_for_promo must be provided"
+
+     content: Dict[str, IRSchema] = {}
+     STREAM_FORMATS = {
+         "application/octet-stream": "octet-stream",
+         "text/event-stream": "event-stream",
+         "application/x-ndjson": "ndjson",
+         "application/json-seq": "json-seq",
+         "multipart/mixed": "multipart-mixed",
+     }
+     stream_flag = False
+     stream_format = None
+
+     # Construct a base name for promoting inline schemas within this response
+     parent_promo_name_for_resp_body = f"{operation_id_for_promo}{code}Response"
+
+     for mt, mn in node.get("content", {}).items():
+         if isinstance(mn, Mapping) and "$ref" in mn and mn["$ref"].startswith("#/components/schemas/"):
+             content[mt] = _parse_schema(None, mn, context, allow_self_reference=False)
+         elif isinstance(mn, Mapping) and "schema" in mn:
+             media_schema_node = mn["schema"]
+             if (
+                 isinstance(media_schema_node, Mapping)
+                 and "$ref" not in media_schema_node
+                 and (
+                     media_schema_node.get("type") == "object"
+                     or "properties" in media_schema_node
+                     or "allOf" in media_schema_node
+                     or "anyOf" in media_schema_node
+                     or "oneOf" in media_schema_node
+                 )
+             ):
+                 content[mt] = _parse_schema(
+                     parent_promo_name_for_resp_body, media_schema_node, context, allow_self_reference=False
+                 )
+             else:
+                 content[mt] = _parse_schema(None, media_schema_node, context, allow_self_reference=False)
+         else:
+             content[mt] = IRSchema(name=None, _from_unresolved_ref=True)
+
+         fmt = STREAM_FORMATS.get(mt.lower())
+         if fmt:
+             stream_flag = True
+             stream_format = fmt
+
+     if not stream_flag:
+         for mt_val, schema_val in content.items():
+             if getattr(schema_val, "format", None) == "binary":
+                 stream_flag = True
+                 stream_format = "octet-stream"
+
+     response = IRResponse(
+         status_code=code,
+         description=node.get("description"),
+         content=content,
+         stream=stream_flag,
+         stream_format=stream_format,
+     )
+
+     # Post-condition checks
+     assert response.status_code == code, "Response status code mismatch"
+     assert response.content == content, "Response content mismatch"
+     assert response.stream == stream_flag, "Response stream flag mismatch"
+
+     return response
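Stream detection above is driven by the STREAM_FORMATS table, with a fallback to `octet-stream` whenever any content schema reports `format: binary`. A standalone sketch of the same decision:

```python
from typing import List, Optional, Tuple

# Same media-type table as parse_response above.
STREAM_FORMATS = {
    "application/octet-stream": "octet-stream",
    "text/event-stream": "event-stream",
    "application/x-ndjson": "ndjson",
    "application/json-seq": "json-seq",
    "multipart/mixed": "multipart-mixed",
}


def detect_stream(content_types: List[str], has_binary_schema: bool = False) -> Tuple[bool, Optional[str]]:
    """Return (stream_flag, stream_format) the way parse_response derives them."""
    stream_flag, stream_format = False, None
    for mt in content_types:
        fmt = STREAM_FORMATS.get(mt.lower())
        if fmt:
            stream_flag, stream_format = True, fmt
    if not stream_flag and has_binary_schema:
        stream_flag, stream_format = True, "octet-stream"
    return stream_flag, stream_format


print(detect_stream(["application/json"]))                         # (False, None)
print(detect_stream(["text/event-stream"]))                        # (True, 'event-stream')
print(detect_stream(["application/pdf"], has_binary_schema=True))  # (True, 'octet-stream')
```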
pyopenapi_gen/core/loader/schemas/__init__.py
@@ -0,0 +1,11 @@
+ """Schema parsing and transformation utilities.
+
+ Functions to extract schemas from raw OpenAPI specifications and convert them
+ into IR format.
+ """
+
+ from __future__ import annotations
+
+ from .extractor import build_schemas, extract_inline_enums
+
+ __all__ = ["extract_inline_enums", "build_schemas"]
pyopenapi_gen/core/loader/schemas/extractor.py
@@ -0,0 +1,184 @@
+ """Schema extractors for OpenAPI IR transformation.
+
+ Provides functions to extract and transform schemas from raw OpenAPI specs.
+ """
+
+ from __future__ import annotations
+
+ import copy
+ import logging
+ from typing import Any, Dict, Mapping
+
+ from pyopenapi_gen import IRSchema
+ from pyopenapi_gen.core.parsing.context import ParsingContext
+ from pyopenapi_gen.core.parsing.schema_parser import _parse_schema
+ from pyopenapi_gen.core.utils import NameSanitizer
+
+ logger = logging.getLogger(__name__)
+
+
+ def build_schemas(raw_schemas: Dict[str, Mapping[str, Any]], raw_components: Mapping[str, Any]) -> ParsingContext:
+     """Build all named schemas up front, populating a ParsingContext.
+
+     Contracts:
+         Preconditions:
+             - raw_schemas is a valid dict containing schema definitions
+             - raw_components is a valid mapping containing component definitions
+         Postconditions:
+             - A ParsingContext is returned with all schemas parsed
+             - All schemas in raw_schemas are populated in context.parsed_schemas
+     """
+     assert isinstance(raw_schemas, dict), "raw_schemas must be a dict"
+     assert isinstance(raw_components, Mapping), "raw_components must be a Mapping"
+
+     context = ParsingContext(raw_spec_schemas=raw_schemas, raw_spec_components=raw_components)
+
+     # Build initial IR for all schemas found in components
+     for n, nd in raw_schemas.items():
+         if n not in context.parsed_schemas:
+             _parse_schema(n, nd, context, allow_self_reference=True)
+
+     # Post-condition check
+     assert all(n in context.parsed_schemas for n in raw_schemas), "Not all schemas were parsed"
+
+     return context
+
+
+ def extract_inline_array_items(schemas: Dict[str, IRSchema]) -> Dict[str, IRSchema]:
+     """Extract inline array item schemas as unique named schemas and update references.
+
+     Contracts:
+         Preconditions:
+             - schemas is a dict of IRSchema objects
+         Postconditions:
+             - Returns an updated schemas dict with extracted array item types
+             - All array item schemas have proper names
+             - No duplicate schema names are created
+     """
+     assert isinstance(schemas, dict), "schemas must be a dict"
+     assert all(isinstance(s, IRSchema) for s in schemas.values()), "all values must be IRSchema objects"
+
+     # Store original schema count for post-condition validation
+     original_schema_count = len(schemas)
+     original_schemas = set(schemas.keys())
+
+     new_item_schemas = {}
+     for schema_name, schema in list(schemas.items()):
+         # Check properties for array types
+         for prop_name, prop_schema in list(schema.properties.items()):
+             if prop_schema.type == "array" and prop_schema.items and not prop_schema.items.name:
+                 # Only extract complex item schemas (objects and arrays), not simple primitives or references
+                 items_schema = prop_schema.items
+                 is_complex_item = (
+                     items_schema.type == "object"
+                     or items_schema.type == "array"
+                     or items_schema.properties
+                     or items_schema.any_of
+                     or items_schema.one_of
+                     or items_schema.all_of
+                 )
+
+                 if is_complex_item:
+                     # Generate a descriptive name for the item schema using content-aware naming
+                     # For arrays of complex objects, use the pattern: {Parent}{Property}Item
+                     # For arrays in response wrappers (like "data" fields), consider the content type
+                     if prop_name.lower() in ["data", "items", "results", "content"]:
+                         # For generic wrapper properties, try to derive name from the item type or parent
+                         if items_schema.type == "object" and schema_name.endswith("Response"):
+                             # Pattern: MessageBatchResponse.data -> MessageItem
+                             base_name = schema_name.replace("Response", "").replace("List", "")
+                             item_schema_name = f"{base_name}Item"
+                         else:
+                             # Fallback to standard pattern
+                             item_schema_name = (
+                                 f"{NameSanitizer.sanitize_class_name(schema_name)}"
+                                 f"{NameSanitizer.sanitize_class_name(prop_name)}Item"
+                             )
+                     else:
+                         # Standard pattern for named properties
+                         item_schema_name = (
+                             f"{NameSanitizer.sanitize_class_name(schema_name)}"
+                             f"{NameSanitizer.sanitize_class_name(prop_name)}Item"
+                         )
+
+                     base_item_name = item_schema_name
+                     i = 1
+                     while item_schema_name in schemas or item_schema_name in new_item_schemas:
+                         item_schema_name = f"{base_item_name}{i}"
+                         i += 1
+
+                     # Create a copy of the item schema with a name
+                     items_copy = copy.deepcopy(prop_schema.items)
+                     items_copy.name = item_schema_name
+                     new_item_schemas[item_schema_name] = items_copy
+
+                     # Update the original array schema to reference the named item schema
+                     prop_schema.items.name = item_schema_name
+
+     # Update the schemas dict with the new item schemas
+     schemas.update(new_item_schemas)
+
+     # Post-condition checks
+     assert len(schemas) >= original_schema_count, "Schemas count should not decrease"
+     assert original_schemas.issubset(set(schemas.keys())), "Original schemas should still be present"
+
+     return schemas
+
+
+ def extract_inline_enums(schemas: Dict[str, IRSchema]) -> Dict[str, IRSchema]:
+     """Extract inline property enums as unique schemas and update property references.
+
+     Contracts:
+         Preconditions:
+             - schemas is a dict of IRSchema objects
+         Postconditions:
+             - Returns an updated schemas dict with extracted enum types and array item types
+             - All property schemas with enums have proper names
+             - All array item schemas have proper names
+             - No duplicate schema names are created
+     """
+     assert isinstance(schemas, dict), "schemas must be a dict"
+     assert all(isinstance(s, IRSchema) for s in schemas.values()), "all values must be IRSchema objects"
+
+     # Store original schema count for post-condition validation
+     original_schema_count = len(schemas)
+     original_schemas = set(schemas.keys())
+
+     # First extract array item schemas so they can have enums extracted in the next step
+     schemas = extract_inline_array_items(schemas)
+
+     new_enums = {}
+     for schema_name, schema in list(schemas.items()):
+         for prop_name, prop_schema in list(schema.properties.items()):
+             if prop_schema.enum and not prop_schema.name:
+                 enum_name = (
+                     f"{NameSanitizer.sanitize_class_name(schema_name)}"
+                     f"{NameSanitizer.sanitize_class_name(prop_name)}Enum"
+                 )
+                 base_enum_name = enum_name
+                 i = 1
+                 while enum_name in schemas or enum_name in new_enums:
+                     enum_name = f"{base_enum_name}{i}"
+                     i += 1
+
+                 enum_schema = IRSchema(
+                     name=enum_name,
+                     type=prop_schema.type,
+                     enum=copy.deepcopy(prop_schema.enum),
+                     description=prop_schema.description or f"Enum for {schema_name}.{prop_name}",
+                 )
+                 new_enums[enum_name] = enum_schema
+
+                 # Update the original property to reference the extracted enum
+                 prop_schema.name = enum_name
+                 prop_schema.type = enum_name  # Make the property reference the enum by name
+                 prop_schema.enum = None  # Clear the inline enum since it's now extracted
+
+     # Update the schemas dict with the new enums
+     schemas.update(new_enums)
+
+     # Post-condition checks
+     assert len(schemas) >= original_schema_count, "Schemas count should not decrease"
+     assert original_schemas.issubset(set(schemas.keys())), "Original schemas should still be present"
+
+     return schemas
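Extracted enums are named `{Schema}{Property}Enum`, with a numeric suffix appended until the name is unique across existing and newly extracted schemas. A standalone sketch of that naming scheme, using simple title-casing where the real code uses NameSanitizer:

```python
from typing import Set


def unique_enum_name(schema_name: str, prop_name: str, taken: Set[str]) -> str:
    """Build {Schema}{Prop}Enum and append a counter on collisions,
    mirroring extract_inline_enums above."""
    base = f"{schema_name}{prop_name.title().replace('_', '')}Enum"
    name, i = base, 1
    while name in taken:
        name = f"{base}{i}"
        i += 1
    return name


taken = {"UserStatusEnum"}
print(unique_enum_name("User", "status", taken))   # UserStatusEnum1 (base name already taken)
print(unique_enum_name("Order", "status", taken))  # OrderStatusEnum
```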
pyopenapi_gen/core/pagination.py
@@ -0,0 +1,64 @@
+ """
+ Pagination utilities for handling paginated API endpoints.
+
+ This module provides functions for working with paginated API responses,
+ turning them into convenient async iterators that automatically handle
+ fetching subsequent pages.
+ """
+
+ from typing import Any, AsyncIterator, Awaitable, Callable, Dict
+
+
+ def paginate_by_next(
+     fetch_page: Callable[..., Awaitable[Dict[str, Any]]],
+     items_key: str = "items",
+     next_key: str = "next",
+     **params: Any,
+ ) -> AsyncIterator[Any]:
+     """
+     Create an async iterator that yields items from paginated API responses.
+
+     This function creates a paginator that automatically handles fetching
+     subsequent pages of results by using a "next page token" pattern. It calls
+     the provided `fetch_page` function repeatedly with the given parameters,
+     updating the next token parameter between calls.
+
+     Args:
+         fetch_page: Async function to fetch a page of results
+         items_key: The key in the response dict where items are located (default: "items")
+         next_key: The key in the response dict for the next page token (default: "next")
+         **params: Initial parameters to pass to fetch_page
+
+     Returns:
+         An AsyncIterator that yields individual items from all pages
+
+     Example:
+         ```python
+         async def fetch_users_page(page_token=None, limit=100):
+             url = f"/users?limit={limit}"
+             if page_token:
+                 url += f"&page_token={page_token}"
+             return await http_client.get(url)
+
+         async for user in paginate_by_next(fetch_users_page,
+                                            items_key="users",
+                                            next_key="page_token",
+                                            limit=50):
+             print(user["name"])
+         ```
+     """
+
+     async def _paginate() -> AsyncIterator[Any]:
+         while True:
+             result = await fetch_page(**params)
+             # result is expected to be a dict
+             # (assumed since fetch_page is typed to return Dict[str, Any])
+             items = result.get(items_key, [])
+             for item in items:
+                 yield item
+             token = result.get(next_key)
+             if not token:
+                 break
+             params[next_key] = token
+
+     return _paginate()
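A minimal usage sketch for `paginate_by_next`, assuming pyopenapi-gen 0.8.3 is installed; the in-memory pager and the `results`/`cursor` keys are hypothetical stand-ins for a real HTTP call:

```python
import asyncio
from typing import Any, Dict, Optional

from pyopenapi_gen.core.pagination import paginate_by_next

# Two fake pages keyed by cursor; a real fetch_page would issue an HTTP request.
PAGES = {
    None: {"results": [1, 2], "cursor": "p2"},
    "p2": {"results": [3, 4], "cursor": None},
}


async def fetch_page(cursor: Optional[str] = None) -> Dict[str, Any]:
    return PAGES[cursor]


async def main() -> None:
    async for item in paginate_by_next(fetch_page, items_key="results", next_key="cursor"):
        print(item)  # 1, 2, 3, 4


asyncio.run(main())
```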
pyopenapi_gen/core/parsing/__init__.py
@@ -0,0 +1,13 @@
+ # Initialize the parsing module
+
+ # Expose the main schema parsing entry point if desired,
+ # otherwise, it remains internal (_parse_schema).
+ # from .schema_parser import _parse_schema as parse_openapi_schema_node
+
+ # Other parsers can be imported here if they need to be part of the public API
+ # of this sub-package, though most are internal helpers for _parse_schema.
+ from typing import List
+
+ __all__: List[str] = [
+     # "parse_openapi_schema_node", # Example if we were to expose it
+ ]
pyopenapi_gen/core/parsing/common/__init__.py
@@ -0,0 +1 @@
+ # common parsing utilities