awslabs.openapi-mcp-server 0.1.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- awslabs/__init__.py +16 -0
- awslabs/openapi_mcp_server/__init__.py +69 -0
- awslabs/openapi_mcp_server/api/__init__.py +18 -0
- awslabs/openapi_mcp_server/api/config.py +200 -0
- awslabs/openapi_mcp_server/auth/__init__.py +27 -0
- awslabs/openapi_mcp_server/auth/api_key_auth.py +185 -0
- awslabs/openapi_mcp_server/auth/auth_cache.py +190 -0
- awslabs/openapi_mcp_server/auth/auth_errors.py +206 -0
- awslabs/openapi_mcp_server/auth/auth_factory.py +146 -0
- awslabs/openapi_mcp_server/auth/auth_protocol.py +63 -0
- awslabs/openapi_mcp_server/auth/auth_provider.py +160 -0
- awslabs/openapi_mcp_server/auth/base_auth.py +218 -0
- awslabs/openapi_mcp_server/auth/basic_auth.py +171 -0
- awslabs/openapi_mcp_server/auth/bearer_auth.py +108 -0
- awslabs/openapi_mcp_server/auth/cognito_auth.py +538 -0
- awslabs/openapi_mcp_server/auth/register.py +100 -0
- awslabs/openapi_mcp_server/patch/__init__.py +17 -0
- awslabs/openapi_mcp_server/prompts/__init__.py +18 -0
- awslabs/openapi_mcp_server/prompts/generators/__init__.py +22 -0
- awslabs/openapi_mcp_server/prompts/generators/operation_prompts.py +642 -0
- awslabs/openapi_mcp_server/prompts/generators/workflow_prompts.py +257 -0
- awslabs/openapi_mcp_server/prompts/models.py +70 -0
- awslabs/openapi_mcp_server/prompts/prompt_manager.py +150 -0
- awslabs/openapi_mcp_server/server.py +511 -0
- awslabs/openapi_mcp_server/utils/__init__.py +18 -0
- awslabs/openapi_mcp_server/utils/cache_provider.py +249 -0
- awslabs/openapi_mcp_server/utils/config.py +35 -0
- awslabs/openapi_mcp_server/utils/error_handler.py +349 -0
- awslabs/openapi_mcp_server/utils/http_client.py +263 -0
- awslabs/openapi_mcp_server/utils/metrics_provider.py +503 -0
- awslabs/openapi_mcp_server/utils/openapi.py +217 -0
- awslabs/openapi_mcp_server/utils/openapi_validator.py +253 -0
- awslabs_openapi_mcp_server-0.1.1.dist-info/METADATA +418 -0
- awslabs_openapi_mcp_server-0.1.1.dist-info/RECORD +38 -0
- awslabs_openapi_mcp_server-0.1.1.dist-info/WHEEL +4 -0
- awslabs_openapi_mcp_server-0.1.1.dist-info/entry_points.txt +2 -0
- awslabs_openapi_mcp_server-0.1.1.dist-info/licenses/LICENSE +175 -0
- awslabs_openapi_mcp_server-0.1.1.dist-info/licenses/NOTICE +2 -0
|
@@ -0,0 +1,217 @@
|
|
|
1
|
+
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
|
2
|
+
#
|
|
3
|
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
4
|
+
# you may not use this file except in compliance with the License.
|
|
5
|
+
# You may obtain a copy of the License at
|
|
6
|
+
#
|
|
7
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
|
8
|
+
#
|
|
9
|
+
# Unless required by applicable law or agreed to in writing, software
|
|
10
|
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
11
|
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
12
|
+
# See the License for the specific language governing permissions and
|
|
13
|
+
# limitations under the License.
|
|
14
|
+
"""Utilities for working with OpenAPI specifications."""
|
|
15
|
+
|
|
16
|
+
import httpx
|
|
17
|
+
import json
|
|
18
|
+
import tempfile
|
|
19
|
+
import time
|
|
20
|
+
from awslabs.openapi_mcp_server import logger
|
|
21
|
+
from awslabs.openapi_mcp_server.utils.cache_provider import cached
|
|
22
|
+
from awslabs.openapi_mcp_server.utils.openapi_validator import validate_openapi_spec
|
|
23
|
+
from pathlib import Path
|
|
24
|
+
from typing import Any, Dict, Optional
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
def extract_api_name_from_spec(spec: Dict[str, Any]) -> Optional[str]:
    """Extract the API name from an OpenAPI specification.

    Args:
        spec: The OpenAPI specification dictionary

    Returns:
        Optional[str]: The API name extracted from the specification, or None if not found

    """
    # Reject anything that is not a non-empty dictionary.
    if not isinstance(spec, dict) or not spec:
        logger.warning('Invalid OpenAPI spec format')
        return None

    # A well-formed spec carries the API name at info.title.
    info = spec.get('info')
    if isinstance(info, dict) and 'title' in info:
        return info['title']

    logger.debug('No API name found in OpenAPI spec')
    return None
|
|
47
|
+
|
|
48
|
+
|
|
49
|
+
# Import yaml conditionally to avoid errors if it's not installed
|
|
50
|
+
try:
|
|
51
|
+
import yaml
|
|
52
|
+
except ImportError:
|
|
53
|
+
yaml = None # type: Optional[Any]
|
|
54
|
+
|
|
55
|
+
|
|
56
|
+
# Try to import prance, but don't fail if it's not installed
|
|
57
|
+
try:
|
|
58
|
+
from prance import ResolvingParser
|
|
59
|
+
|
|
60
|
+
PRANCE_AVAILABLE = True
|
|
61
|
+
except ImportError:
|
|
62
|
+
PRANCE_AVAILABLE = False
|
|
63
|
+
logger.warning('Prance library not found. Reference resolution will be limited.')
|
|
64
|
+
|
|
65
|
+
|
|
66
|
+
def _parse_spec_content(content: str) -> Dict[str, Any]:
    """Parse raw OpenAPI spec text as JSON, falling back to YAML.

    Args:
        content: Raw specification text (JSON or YAML)

    Returns:
        Dict[str, Any]: The parsed specification

    Raises:
        ImportError: If YAML parsing is needed but pyyaml is not installed
        ValueError: If the content is neither valid JSON nor valid YAML

    """
    try:
        return json.loads(content)
    except json.JSONDecodeError as json_err:
        # If it's not JSON, try to parse as YAML (optional pyyaml dependency).
        try:
            import yaml

            return yaml.safe_load(content)
        except ImportError:
            logger.error('YAML parsing requires pyyaml to be installed')
            raise ImportError(
                "Required dependency 'pyyaml' not installed. Install it with: pip install pyyaml"
            ) from json_err
        except Exception as yaml_err:
            logger.error(f'Failed to parse YAML: {yaml_err}')
            raise ValueError(f'Invalid YAML: {yaml_err}') from yaml_err


@cached(ttl_seconds=3600)  # Cache OpenAPI specs for 1 hour
def load_openapi_spec(url: str = '', path: str = '') -> Dict[str, Any]:
    """Load an OpenAPI specification from a URL or file path.

    If prance is available, it will be used to resolve references in the OpenAPI spec.
    Otherwise, falls back to basic JSON/YAML parsing.

    Args:
        url: URL to the OpenAPI specification
        path: Path to the OpenAPI specification file

    Returns:
        Dict[str, Any]: The parsed OpenAPI specification

    Raises:
        ValueError: If neither url nor path are provided, or the spec is invalid
        FileNotFoundError: If the file at path does not exist
        httpx.HTTPError: If there's an HTTP error when fetching the spec
        httpx.TimeoutException: If there's a timeout when fetching the spec

    """
    if not url and not path:
        logger.error('Neither URL nor path provided')
        raise ValueError('Either url or path must be provided')

    # Load from URL
    if url:
        logger.info(f'Fetching OpenAPI spec from URL: {url}')
        last_exception = None

        # Use retry logic for network resilience
        for attempt in range(3):
            try:
                response = httpx.get(url, timeout=10.0)
                response.raise_for_status()

                if PRANCE_AVAILABLE:
                    logger.info('Using prance for reference resolution')
                    # prance needs a file on disk, so stage the response body
                    # in a temporary file and delete it when done.
                    with tempfile.NamedTemporaryFile(suffix='.yaml', delete=False) as temp_file:
                        temp_path = temp_file.name
                        temp_file.write(response.content)

                    try:
                        parser = ResolvingParser(temp_path)
                        spec = parser.specification
                    except Exception as e:
                        logger.warning(
                            f'Failed to parse with prance: {e}. Falling back to basic parsing.'
                        )
                        # Fall back to basic parsing
                        spec = response.json()
                    finally:
                        # Clean up the temporary file on both success and failure
                        Path(temp_path).unlink(missing_ok=True)
                else:
                    # Basic parsing without reference resolution
                    spec = response.json()

                # Validate the spec (ValueError deliberately escapes the
                # retry handler below, which only catches network errors)
                if validate_openapi_spec(spec):
                    return spec
                else:
                    logger.error('Invalid OpenAPI specification')
                    raise ValueError('Invalid OpenAPI specification')

            except (httpx.TimeoutException, httpx.HTTPError) as e:
                last_exception = e
                if attempt < 2:  # Don't log on the last attempt
                    logger.warning(f'Attempt {attempt + 1} failed: {e}. Retrying...')
                    time.sleep(1 * (2**attempt))  # Exponential backoff
                else:
                    # Re-raise the exception on the last attempt
                    logger.error(f'All retry attempts failed: {e}')
                    raise

        # This will only be reached if all retries fail and no exception is raised
        if last_exception:
            raise last_exception
        else:
            raise httpx.HTTPError('All retry attempts failed')

    # Load from file
    spec_path = Path(path)
    if not spec_path.exists():
        logger.error(f'OpenAPI spec file not found: {path}')
        raise FileNotFoundError(f'File not found: {path}')

    logger.info(f'Loading OpenAPI spec from file: {path}')
    try:
        if PRANCE_AVAILABLE:
            logger.info('Using prance for reference resolution')
            # Use prance for reference resolution if available
            try:
                parser = ResolvingParser(path)
                spec = parser.specification
            except Exception as e:
                logger.warning(
                    f'Failed to parse with prance: {e}. Falling back to basic parsing.'
                )
                # Fall back to basic parsing
                spec = _parse_spec_content(spec_path.read_text())
        else:
            # Basic parsing without reference resolution
            spec = _parse_spec_content(spec_path.read_text())

        # Validate the spec
        if validate_openapi_spec(spec):
            return spec
        else:
            raise ValueError('Invalid OpenAPI specification')

    except Exception as e:
        logger.error(f'Failed to load OpenAPI spec from file: {path} - Error: {e}')
        raise
|
|
@@ -0,0 +1,253 @@
|
|
|
1
|
+
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
|
2
|
+
#
|
|
3
|
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
4
|
+
# you may not use this file except in compliance with the License.
|
|
5
|
+
# You may obtain a copy of the License at
|
|
6
|
+
#
|
|
7
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
|
8
|
+
#
|
|
9
|
+
# Unless required by applicable law or agreed to in writing, software
|
|
10
|
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
11
|
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
12
|
+
# See the License for the specific language governing permissions and
|
|
13
|
+
# limitations under the License.
|
|
14
|
+
"""OpenAPI validation utilities.
|
|
15
|
+
|
|
16
|
+
This module provides validation for OpenAPI specifications using openapi-core
|
|
17
|
+
when available, with a simple fallback implementation.
|
|
18
|
+
"""
|
|
19
|
+
|
|
20
|
+
import os
|
|
21
|
+
from awslabs.openapi_mcp_server import logger
|
|
22
|
+
from typing import Any, Dict, List, Tuple
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
# Check if openapi-core is available
|
|
26
|
+
openapi_core = None
|
|
27
|
+
try:
|
|
28
|
+
import openapi_core
|
|
29
|
+
|
|
30
|
+
OPENAPI_CORE_AVAILABLE = True
|
|
31
|
+
logger.debug('Using openapi-core for validation')
|
|
32
|
+
except ImportError:
|
|
33
|
+
OPENAPI_CORE_AVAILABLE = False
|
|
34
|
+
logger.debug('openapi-core not available, using simple validation')
|
|
35
|
+
|
|
36
|
+
# Use openapi-core if available and not explicitly disabled
|
|
37
|
+
USE_OPENAPI_CORE = OPENAPI_CORE_AVAILABLE and os.environ.get(
|
|
38
|
+
'MCP_USE_OPENAPI_CORE', 'true'
|
|
39
|
+
).lower() in ('true', '1', 'yes')
|
|
40
|
+
|
|
41
|
+
|
|
42
|
+
def validate_openapi_spec(spec: Dict[str, Any]) -> bool:
    """Validate an OpenAPI specification.

    Performs lightweight structural checks (required top-level fields and a
    version sanity check), then — when openapi-core is available and enabled —
    attempts stricter validation with whichever API the installed
    openapi-core version exposes.

    Args:
        spec: The OpenAPI specification to validate

    Returns:
        bool: True if the specification is valid, False otherwise

    """
    # Basic validation first
    # Check for required fields
    if 'openapi' not in spec:
        logger.error("Missing 'openapi' field in OpenAPI spec")
        return False

    if 'info' not in spec:
        logger.error("Missing 'info' field in OpenAPI spec")
        return False

    if 'paths' not in spec:
        logger.error("Missing 'paths' field in OpenAPI spec")
        return False

    # Check OpenAPI version. Coerce to str first: YAML parses a version like
    # `openapi: 3.0` as a float, which would crash on .startswith().
    version = str(spec['openapi'])
    if not version.startswith('3.'):
        logger.warning(f'OpenAPI version {version} may not be fully supported')

    # Use openapi-core for additional validation if available
    if USE_OPENAPI_CORE and openapi_core is not None:
        try:
            # Create spec object - this will validate the spec. The
            # openapi-core API has changed across releases, so probe
            # dynamically for whichever entry point exists.
            if hasattr(openapi_core, 'create_spec'):
                # Ignore type error since we're checking dynamically
                openapi_core.create_spec(spec)  # type: ignore
            # For older versions of openapi-core
            elif hasattr(openapi_core, 'Spec'):
                spec_class = getattr(openapi_core, 'Spec')
                if hasattr(spec_class, 'create'):
                    # Ignore type error since we're checking dynamically
                    spec_class.create(spec)  # type: ignore
            # For newer versions of openapi-core
            elif hasattr(openapi_core, 'OpenAPISpec'):
                # Ignore type error since we're checking dynamically
                getattr(openapi_core, 'OpenAPISpec').create(spec)  # type: ignore
            else:
                logger.warning('Unsupported openapi-core version - skipping additional validation')
            logger.debug('OpenAPI spec validated with openapi-core')
        except Exception as e:
            logger.error(f'Error validating OpenAPI spec with openapi-core: {e}')
            # We already did basic validation, so we'll still return True
            return True

    # Return True if we've passed all validations
    return True
|
|
98
|
+
|
|
99
|
+
|
|
100
|
+
def extract_api_structure(spec: Dict[str, Any]) -> Dict[str, Any]:
    """Extract the structure of an API from its OpenAPI specification.

    Args:
        spec: The OpenAPI specification

    Returns:
        Dict[str, Any]: A structured representation of the API

    """
    info = spec.get('info', {})
    result: Dict[str, Any] = {
        'info': {
            'title': info.get('title', 'Unknown API'),
            'version': info.get('version', 'Unknown'),
            'description': info.get('description', ''),
        },
        'paths': {},
        'operations': [],
        'schemas': [],
    }

    http_verbs = ('get', 'post', 'put', 'delete', 'patch', 'options', 'head')

    # Walk every path and collect per-method operation details.
    for path, path_item in spec.get('paths', {}).items():
        methods: Dict[str, Any] = {}

        for verb in http_verbs:
            if verb not in path_item:
                continue
            operation = path_item[verb]
            op_id = operation.get('operationId', f'{verb}{path}')
            summary = operation.get('summary', '')

            # Parameter metadata, normalized to a flat list of dicts.
            params = [
                {
                    'name': p.get('name', ''),
                    'in': p.get('in', ''),
                    'required': p.get('required', False),
                    'description': p.get('description', ''),
                }
                for p in operation.get('parameters', [])
            ]

            # Request body summary, when one is declared.
            body_info = None
            if 'requestBody' in operation:
                req = operation['requestBody']
                body_info = {
                    'required': req.get('required', False),
                    'content_types': list(req.get('content', {}).keys()),
                }

            # Response summaries keyed by status code.
            responses = {
                status: {
                    'description': resp.get('description', ''),
                    'content_types': list(resp.get('content', {}).keys()),
                }
                for status, resp in operation.get('responses', {}).items()
            }

            methods[verb] = {
                'operationId': op_id,
                'summary': summary,
                'description': operation.get('description', ''),
                'parameters': params,
                'requestBody': body_info,
                'responses': responses,
            }

            # Flat operation index used for quick lookups.
            result['operations'].append(
                {
                    'operationId': op_id,
                    'method': verb.upper(),
                    'path': path,
                    'summary': summary,
                }
            )

        result['paths'][path] = {'path': path, 'methods': methods}

    # Summarize component schemas (name, type, property count, required list).
    if 'components' in spec and 'schemas' in spec['components']:
        for schema_name, schema in spec['components']['schemas'].items():
            result['schemas'].append(
                {
                    'name': schema_name,
                    'type': schema.get('type', 'object'),
                    'properties': len(schema.get('properties', {})),
                    'required': schema.get('required', []),
                }
            )

    return result
|
|
195
|
+
|
|
196
|
+
|
|
197
|
+
def find_pagination_endpoints(spec: Dict[str, Any]) -> List[Tuple[str, str, Dict[str, Any]]]:
    """Find endpoints that likely support pagination.

    An endpoint qualifies when it is a GET operation that either declares a
    common paging query parameter or returns a JSON payload that is (or
    wraps) an array.

    Args:
        spec: The OpenAPI specification

    Returns:
        List[Tuple[str, str, Dict[str, Any]]]: List of (path, method, operation) tuples

    """
    paging_param_names = {
        'page',
        'limit',
        'offset',
        'size',
        'per_page',
        'pagesize',
        'page_size',
        'next',
        'cursor',
    }
    collection_prop_names = {'items', 'data', 'results', 'content'}

    matches: List[Tuple[str, str, Dict[str, Any]]] = []

    for path, path_item in spec.get('paths', {}).items():
        for method, operation in path_item.items():
            # Pagination heuristics only apply to GET operations.
            if method.lower() != 'get':
                continue

            # Heuristic 1: a parameter commonly used for paging.
            paged_params = any(
                param.get('name', '').lower() in paging_param_names
                for param in operation.get('parameters', [])
            )

            # Heuristic 2: a JSON response that is, or wraps, an array.
            returns_collection = False
            for response in operation.get('responses', {}).values():
                for content_type, content in response.get('content', {}).items():
                    if 'application/json' not in content_type:
                        continue
                    schema = content.get('schema', {})
                    if schema.get('type') == 'array' or 'items' in schema:
                        returns_collection = True
                        break
                    # Check for common pagination response structures
                    # (an envelope object with an array-valued member).
                    props = schema.get('properties', {})
                    for prop_name in props:
                        if prop_name.lower() in collection_prop_names:
                            candidate = props[prop_name]
                            if candidate.get('type') == 'array' or 'items' in candidate:
                                returns_collection = True
                                break

            if paged_params or returns_collection:
                matches.append((path, method, operation))

    return matches
|