api-to-tools 0.1.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,12 @@
1
+ __pycache__/
2
+ *.pyc
3
+ *.pyo
4
+ *.egg-info/
5
+ dist/
6
+ build/
7
+ .eggs/
8
+ .env
9
+ .DS_Store
10
+ .ruff_cache/
11
+ .pytest_cache/
12
+ node_modules/
@@ -0,0 +1,21 @@
1
+ MIT License
2
+
3
+ Copyright (c) 2026 SonAIengine
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
@@ -0,0 +1,31 @@
1
+ Metadata-Version: 2.4
2
+ Name: api-to-tools
3
+ Version: 0.1.0
4
+ Summary: Universal library that converts any API (OpenAPI, WSDL/SOAP, GraphQL, gRPC, AsyncAPI) into LLM-callable tool definitions
5
+ Project-URL: Homepage, https://github.com/SonAIengine/api-to-tools
6
+ Project-URL: Repository, https://github.com/SonAIengine/api-to-tools
7
+ Author: SonAIengine
8
+ License-Expression: MIT
9
+ License-File: LICENSE
10
+ Keywords: api,function-calling,graphql,grpc,llm,mcp,openapi,soap,swagger,tools,wsdl
11
+ Classifier: Development Status :: 3 - Alpha
12
+ Classifier: Intended Audience :: Developers
13
+ Classifier: License :: OSI Approved :: MIT License
14
+ Classifier: Programming Language :: Python :: 3
15
+ Classifier: Topic :: Software Development :: Libraries
16
+ Requires-Python: >=3.10
17
+ Requires-Dist: graphql-core>=3.2
18
+ Requires-Dist: grpcio-tools>=1.60
19
+ Requires-Dist: httpx>=0.27
20
+ Requires-Dist: mcp>=1.0
21
+ Requires-Dist: openapi-spec-validator>=0.7
22
+ Requires-Dist: prance>=23.6
23
+ Requires-Dist: pyyaml>=6.0
24
+ Requires-Dist: xmltodict>=0.13
25
+ Requires-Dist: zeep>=4.2
26
+ Provides-Extra: asyncapi
27
+ Requires-Dist: asyncapi-parser>=0.1; extra == 'asyncapi'
28
+ Provides-Extra: dev
29
+ Requires-Dist: pytest-asyncio>=0.23; extra == 'dev'
30
+ Requires-Dist: pytest>=8.0; extra == 'dev'
31
+ Requires-Dist: ruff>=0.4; extra == 'dev'
File without changes
@@ -0,0 +1,49 @@
1
+ [build-system]
2
+ requires = ["hatchling"]
3
+ build-backend = "hatchling.build"
4
+
5
+ [project]
6
+ name = "api-to-tools"
7
+ version = "0.1.0"
8
+ description = "Universal library that converts any API (OpenAPI, WSDL/SOAP, GraphQL, gRPC, AsyncAPI) into LLM-callable tool definitions"
9
+ readme = "README.md"
10
+ license = "MIT"
11
+ requires-python = ">=3.10"
12
+ authors = [{ name = "SonAIengine" }]
13
+ keywords = ["api", "openapi", "swagger", "wsdl", "soap", "graphql", "grpc", "mcp", "llm", "tools", "function-calling"]
14
+ classifiers = [
15
+ "Development Status :: 3 - Alpha",
16
+ "Intended Audience :: Developers",
17
+ "License :: OSI Approved :: MIT License",
18
+ "Programming Language :: Python :: 3",
19
+ "Topic :: Software Development :: Libraries",
20
+ ]
21
+ dependencies = [
22
+ "httpx>=0.27",
23
+ "pyyaml>=6.0",
24
+ "prance>=23.6", # OpenAPI parser
25
+ "openapi-spec-validator>=0.7",
26
+ "zeep>=4.2", # WSDL/SOAP
27
+ "graphql-core>=3.2", # GraphQL
28
+ "grpcio-tools>=1.60", # gRPC / protobuf
29
+ "mcp>=1.0", # MCP SDK
30
+ "xmltodict>=0.13", # XML <-> dict
31
+ ]
32
+
33
+ [project.optional-dependencies]
34
+ asyncapi = ["asyncapi-parser>=0.1"]
35
+ dev = ["pytest>=8.0", "pytest-asyncio>=0.23", "ruff>=0.4"]
36
+
37
+ [project.scripts]
38
+ api-to-tools = "api_to_tools.cli:main"
39
+
40
+ [project.urls]
41
+ Homepage = "https://github.com/SonAIengine/api-to-tools"
42
+ Repository = "https://github.com/SonAIengine/api-to-tools"
43
+
44
+ [tool.hatch.build.targets.wheel]
45
+ packages = ["src/api_to_tools"]
46
+
47
+ [tool.ruff]
48
+ target-version = "py310"
49
+ line-length = 120
@@ -0,0 +1,23 @@
1
"""Universal API-to-LLM-tools converter.

Public surface: ``discover`` / ``to_tools`` / ``execute`` for the core
pipeline, converters to the OpenAI and Anthropic tool schemas, and
helpers for grouping/searching the discovered tools.
"""

from api_to_tools.types import Tool, ToolParameter, DetectionResult, SpecType, Protocol
from api_to_tools.core import discover, to_tools, execute
from api_to_tools.adapters.formats import to_function_calling, to_anthropic_tools
from api_to_tools.utils import group_by_tag, group_by_method, summarize, search_tools

__all__ = [
    # Core pipeline
    "discover",
    "to_tools",
    "execute",
    # LLM format converters
    "to_function_calling",
    "to_anthropic_tools",
    # Browsing helpers
    "group_by_tag",
    "group_by_method",
    "summarize",
    "search_tools",
    # Types
    "Tool",
    "ToolParameter",
    "DetectionResult",
    "SpecType",
    "Protocol",
]
@@ -0,0 +1,6 @@
1
"""Output adapters.

Re-exports the MCP server factory and the LLM tool-format converters.
"""

from api_to_tools.adapters.mcp_adapter import create_mcp_server
from api_to_tools.adapters.formats import to_function_calling, to_anthropic_tools

__all__ = ["create_mcp_server", "to_function_calling", "to_anthropic_tools"]
@@ -0,0 +1,54 @@
1
+ """Format converters for LLM tool calling."""
2
+
3
+ from __future__ import annotations
4
+
5
+ from api_to_tools.types import Tool
6
+
7
+
8
def to_function_calling(tools: list[Tool]) -> list[dict]:
    """Convert tools to the OpenAI function-calling schema.

    Each tool becomes a ``{"type": "function", "function": {...}}`` entry
    whose ``parameters`` member is a JSON-Schema object; ``description``
    and ``enum`` keys are emitted only when the parameter carries them.
    """
    converted: list[dict] = []
    for tool in tools:
        properties: dict = {}
        for param in tool.parameters:
            schema: dict = {"type": param.type}
            if param.description:
                schema["description"] = param.description
            if param.enum:
                schema["enum"] = param.enum
            properties[param.name] = schema
        converted.append(
            {
                "type": "function",
                "function": {
                    "name": tool.name,
                    "description": tool.description,
                    "parameters": {
                        "type": "object",
                        "properties": properties,
                        "required": [p.name for p in tool.parameters if p.required],
                    },
                },
            }
        )
    return converted
32
+
33
+
34
def to_anthropic_tools(tools: list[Tool]) -> list[dict]:
    """Convert tools to the Anthropic tool_use schema.

    Each tool becomes ``{"name", "description", "input_schema"}`` where
    ``input_schema`` is a JSON-Schema object; optional ``description`` and
    ``enum`` keys appear only when the parameter carries them.
    """

    def _param_schema(param) -> dict:
        # Only include optional keys when they carry a value.
        schema = {"type": param.type}
        if param.description:
            schema["description"] = param.description
        if param.enum:
            schema["enum"] = param.enum
        return schema

    results = []
    for tool in tools:
        required = [p.name for p in tool.parameters if p.required]
        results.append(
            {
                "name": tool.name,
                "description": tool.description,
                "input_schema": {
                    "type": "object",
                    "properties": {p.name: _param_schema(p) for p in tool.parameters},
                    "required": required,
                },
            }
        )
    return results
@@ -0,0 +1,44 @@
1
+ """MCP Server adapter."""
2
+
3
+ from __future__ import annotations
4
+
5
+ import json
6
+
7
+ from mcp.server.fastmcp import FastMCP
8
+
9
+ from api_to_tools.types import Tool
10
+ from api_to_tools.executors import get_executor
11
+
12
+
13
def create_mcp_server(tools: list[Tool], name: str = "api-to-tools") -> FastMCP:
    """Build a FastMCP server exposing every tool in *tools*.

    Each tool is registered through ``_register_tool`` so the tool object
    is bound per registration call rather than captured by a late-binding
    loop closure.
    """
    server = FastMCP(name)
    for entry in tools:
        _register_tool(server, entry)
    return server
22
+
23
+
24
def _register_tool(mcp: FastMCP, tool: Tool) -> None:
    """Register a single tool on the MCP server.

    Kept as a standalone function so *tool* is bound per call; defining
    the handler inline in ``create_mcp_server``'s loop would late-bind
    the loop variable.

    Parameter descriptions are folded into the tool description text
    because the handler signature is ``**kwargs`` and carries no
    per-parameter documentation of its own.
    """

    # Build parameter annotations for FastMCP
    param_descriptions = {p.name: p.description or "" for p in tool.parameters}
    description = tool.description
    if param_descriptions:
        # Only parameters that actually have a description are listed.
        param_lines = [f" - {k}: {v}" for k, v in param_descriptions.items() if v]
        if param_lines:
            description += "\n\nParameters:\n" + "\n".join(param_lines)

    @mcp.tool(name=tool.name, description=description)
    def _handler(**kwargs) -> str:
        # Errors are returned as text rather than raised so the MCP client
        # receives a readable message instead of a protocol failure.
        try:
            executor = get_executor(tool.protocol)
            result = executor(tool, kwargs)
            if isinstance(result.data, str):
                return result.data
            # default=str guards values json cannot encode natively.
            return json.dumps(result.data, ensure_ascii=False, indent=2, default=str)
        except Exception as e:
            return f"Error: {e}"
@@ -0,0 +1,131 @@
1
+ """CLI entry point."""
2
+
3
+ from __future__ import annotations
4
+
5
+ import argparse
6
+ import json
7
+ import sys
8
+
9
+ from api_to_tools.core import discover
10
+ from api_to_tools.adapters.formats import to_function_calling, to_anthropic_tools
11
+ from api_to_tools.utils import summarize, search_tools
12
+
13
+
14
def cmd_serve(args):
    """Discover the API at ``args.url`` and serve its tools over MCP stdio."""
    # Imported lazily: the MCP adapter is only needed for `serve`.
    from api_to_tools.adapters.mcp_adapter import create_mcp_server

    # Diagnostics go to stderr; stdout is reserved for the MCP transport.
    print(f"Discovering API at {args.url}...", file=sys.stderr)
    discovered = discover(args.url)
    print(f"Found {len(discovered)} tools. Starting MCP server '{args.name}'...", file=sys.stderr)
    for tool in discovered:
        print(f" - {tool.name}", file=sys.stderr)

    server = create_mcp_server(discovered, name=args.name)
    server.run(transport="stdio")
25
+
26
+
27
def cmd_list(args):
    """Print discovered tools, optionally filtered by tag, method, or search."""
    tools = discover(args.url)

    if args.tag:
        needle = args.tag.lower()
        tools = [t for t in tools if any(needle in tag.lower() for tag in t.tags)]
    if args.method:
        wanted = args.method.upper()
        tools = [t for t in tools if t.method.upper() == wanted]
    if args.search:
        tools = search_tools(tools, args.search)

    for tool in tools:
        # Compact signature: name, required marker (! / ?), and type.
        sig_parts = []
        for p in tool.parameters:
            marker = "!" if p.required else "?"
            sig_parts.append(f"{p.name}{marker}:{p.type}")
        params = ", ".join(sig_parts)

        print(f"[{tool.method:<8}] {tool.name}")
        if tool.description:
            print(f" {tool.description[:80]}")
        if params:
            print(f" ({params})")

    # The count goes to stderr so stdout stays pipeline-friendly.
    print(f"\nTotal: {len(tools)} tools", file=sys.stderr)
46
+
47
+
48
def cmd_info(args):
    """Print a summary of the API: tool counts by protocol, method, and tag."""
    print(f"Discovering API at {args.url}...", file=sys.stderr)
    summary = summarize(discover(args.url))

    print(f"Total tools: {summary['total']}\n")

    print("By Protocol:")
    for key, count in summary["by_protocol"].items():
        print(f" {key}: {count}")

    print("\nBy Method:")
    for key, count in summary["by_method"].items():
        print(f" {key}: {count}")

    print("\nBy Tag:")
    tag_counts = list(summary["by_tag"].items())
    # Only the first 20 tags are listed; the rest are summarized.
    for key, count in tag_counts[:20]:
        print(f" {key}: {count}")
    remaining = len(tag_counts) - 20
    if remaining > 0:
        print(f" ... and {remaining} more tags")
69
+
70
+
71
def cmd_export(args):
    """Dump (optionally filtered) tool definitions to stdout as JSON."""
    tools = discover(args.url)

    if args.tag:
        needle = args.tag.lower()
        tools = [t for t in tools if any(needle in tag.lower() for tag in t.tags)]
    if args.search:
        tools = search_tools(tools, args.search)

    if args.format == "openai":
        payload = to_function_calling(tools)
    elif args.format == "anthropic":
        payload = to_anthropic_tools(tools)
    else:
        # Raw dataclass dump (--format json, the default).
        from dataclasses import asdict
        payload = [asdict(t) for t in tools]

    print(json.dumps(payload, indent=2, ensure_ascii=False))
88
+
89
+
90
def _build_parser() -> argparse.ArgumentParser:
    """Construct the argparse parser with all four subcommands."""
    parser = argparse.ArgumentParser(
        prog="api-to-tools",
        description="Convert any API into LLM-callable tools",
    )
    sub = parser.add_subparsers(dest="command")

    serve = sub.add_parser("serve", help="Start MCP server (stdio)")
    serve.add_argument("url", help="API spec URL or website URL")
    serve.add_argument("--name", default="api-to-tools", help="MCP server name")

    lst = sub.add_parser("list", help="List discovered tools")
    lst.add_argument("url")
    lst.add_argument("--tag", help="Filter by tag")
    lst.add_argument("--method", help="Filter by HTTP method")
    lst.add_argument("--search", help="Search by name/description")

    info = sub.add_parser("info", help="Show API summary")
    info.add_argument("url")

    export = sub.add_parser("export", help="Export tool definitions")
    export.add_argument("url")
    export.add_argument("--format", choices=["openai", "anthropic", "json"], default="json")
    export.add_argument("--tag", help="Filter by tag")
    export.add_argument("--search", help="Search filter")

    return parser


def main():
    """CLI entry point: parse arguments and dispatch to the chosen subcommand."""
    parser = _build_parser()
    args = parser.parse_args()

    if not args.command:
        # No subcommand given: show usage and signal failure.
        parser.print_help()
        sys.exit(1)

    dispatch = {"serve": cmd_serve, "list": cmd_list, "info": cmd_info, "export": cmd_export}
    dispatch[args.command](args)
128
+
129
+
130
# Allow running the CLI directly: `python -m api_to_tools.cli`.
if __name__ == "__main__":
    main()
@@ -0,0 +1,65 @@
1
+ """Core functions."""
2
+
3
+ from __future__ import annotations
4
+
5
+ from api_to_tools.types import Tool, DetectionResult, ExecutionResult
6
+ from api_to_tools.detector import detect
7
+ from api_to_tools.parsers import get_parser
8
+ from api_to_tools.executors import get_executor
9
+
10
+
11
def discover(url: str, **kwargs) -> list[Tool]:
    """Discover an API spec at *url* and parse it into tools.

    Keyword arguments are split: ``timeout`` and ``probe_paths`` go to
    detection; everything (including those) is forwarded to
    :func:`to_tools`, which applies the filter options it recognizes.

    >>> tools = discover("https://date.nager.at/openapi/v3.json")
    >>> tools = discover("https://petstore.swagger.io")  # auto-detects
    """
    detect_kwargs = {}
    for key in ("timeout", "probe_paths"):
        if key in kwargs:
            detect_kwargs[key] = kwargs[key]
    return to_tools(detect(url, **detect_kwargs), **kwargs)
21
+
22
+
23
def to_tools(detection: DetectionResult, **kwargs) -> list[Tool]:
    """Parse a detected spec into tools, then apply optional filters.

    Recognized keyword options:
        base_url:    replace the scheme+host of every tool endpoint.
        tags:        keep only tools carrying at least one of these tags.
        methods:     keep only tools whose HTTP method is in this list
                     (case-insensitive).
        path_filter: keep only tools whose endpoint matches this regex.

    Unknown keyword arguments (e.g. detection options forwarded by
    ``discover``) are ignored.
    """
    # Single local import replaces the two duplicated in-branch imports.
    import re

    parser = get_parser(detection.type)
    # WSDL/GraphQL parsers need the URL itself: their libraries fetch the
    # spec content on their own.
    if detection.type in ("wsdl", "graphql"):
        input_data = detection.spec_url
    else:
        input_data = detection.raw_content or detection.spec_url
    tools = parser(input_data, source_url=detection.spec_url)

    # Apply base URL override: swap scheme+host, keep the path.
    base_url = kwargs.get("base_url")
    if base_url:
        for t in tools:
            t.endpoint = re.sub(r"^https?://[^/]+", base_url, t.endpoint)

    # Filters
    tags = kwargs.get("tags")
    if tags:
        tools = [t for t in tools if any(tag in t.tags for tag in tags)]

    methods = kwargs.get("methods")
    if methods:
        upper = [m.upper() for m in methods]
        tools = [t for t in tools if t.method.upper() in upper]

    path_filter = kwargs.get("path_filter")
    if path_filter:
        tools = [t for t in tools if re.search(path_filter, t.endpoint)]

    return tools
56
+
57
+
58
def execute(tool: Tool, args: dict) -> ExecutionResult:
    """Execute *tool* with the given argument mapping.

    Looks up the executor for the tool's protocol and delegates to it.

    >>> tools = discover("https://date.nager.at/openapi/v3.json")
    >>> result = execute(tools[0], {"countryCode": "KR"})
    """
    return get_executor(tool.protocol)(tool, args)
@@ -0,0 +1,181 @@
1
+ """Auto-detect API spec type from a URL."""
2
+
3
+ from __future__ import annotations
4
+
5
+ import re
6
+ from urllib.parse import urljoin, urlparse
7
+
8
+ import httpx
9
+
10
+ from api_to_tools.types import DetectionResult, SpecType
11
+
12
# Candidate spec locations probed per spec type when the given URL does not
# itself serve a spec (see detect()).
WELL_KNOWN_PATHS: dict[SpecType, list[str]] = {
    "openapi": [
        "/openapi.json", "/openapi.yaml", "/openapi/v3.json",
        "/swagger.json", "/swagger.yaml",
        "/api-docs", "/v2/api-docs", "/v3/api-docs",
        "/.well-known/openapi",
        "/docs/openapi.json", "/docs/swagger.json",
        "/swagger/v1/swagger.json", "/swagger/v2/swagger.json",
        "/api/swagger.json", "/api/openapi.json",
        "/spec.json", "/api/spec.json",
        "/api-docs.json", "/api/api-docs",
    ],
    # Entries starting with "?" are appended as a query string, not a path.
    "wsdl": ["?wsdl", "?WSDL", "/ws?wsdl", "/services?wsdl"],
    "graphql": ["/graphql", "/.well-known/graphql"],
    # gRPC has no HTTP-discoverable spec document to probe for.
    "grpc": [],
    "asyncapi": ["/asyncapi.json", "/asyncapi.yaml"],
    "jsonrpc": ["/rpc", "/jsonrpc"],
}

# Minimal introspection query used to sniff GraphQL endpoints via POST.
GRAPHQL_PROBE_QUERY = '{"query":"{ __schema { types { name } } }"}'
32
+
33
+
34
+ def _detect_from_content(content: str, content_type: str = "") -> SpecType | None:
35
+ """Detect spec type from response content."""
36
+ # JSON
37
+ if "json" in content_type or content.lstrip().startswith("{"):
38
+ try:
39
+ import json
40
+ data = json.loads(content)
41
+ if "openapi" in data or "swagger" in data:
42
+ return "openapi"
43
+ if "asyncapi" in data:
44
+ return "asyncapi"
45
+ if isinstance(data.get("data"), dict) and "__schema" in data["data"]:
46
+ return "graphql"
47
+ except (json.JSONDecodeError, TypeError):
48
+ pass
49
+
50
+ # XML
51
+ if "xml" in content_type or content.lstrip().startswith("<"):
52
+ if "<definitions" in content or "<wsdl:definitions" in content:
53
+ return "wsdl"
54
+
55
+ # YAML
56
+ if "openapi:" in content or "swagger:" in content:
57
+ return "openapi"
58
+ if "asyncapi:" in content:
59
+ return "asyncapi"
60
+
61
+ return None
62
+
63
+
64
def _extract_spec_url_from_html(html: str, base_url: str, client: httpx.Client, timeout: float) -> str | None:
    """Extract the spec URL from a Swagger UI / Redoc HTML page.

    Tries, in order: the Swagger UI ``url: "..."`` config snippet, the
    Redoc ``spec-url`` attribute, an ``api-definition`` <link> tag, and
    finally any swagger-initializer/config JS file the page references
    (fetched with *client* and scanned for a spec URL). All matches are
    resolved against *base_url*. Returns None when nothing is found.
    """
    # Swagger UI: url: "..."
    m = re.search(r'url:\s*["\']([^"\']+)["\']', html)
    if m:
        return urljoin(base_url, m.group(1))

    # Redoc: spec-url="..."
    m = re.search(r'spec-url=["\']([^"\']+)["\']', html)
    if m:
        return urljoin(base_url, m.group(1))

    # Link tag
    m = re.search(r'<link[^>]+rel=["\']api-definition["\'][^>]+href=["\']([^"\']+)["\']', html)
    if m:
        return urljoin(base_url, m.group(1))

    # Swagger UI initializer JS: fetch each referenced script and scan it.
    for script_match in re.finditer(r'<script[^>]+src=["\']([^"\']*(?:initializer|config)[^"\']*)["\']', html, re.I):
        try:
            js_url = urljoin(base_url, script_match.group(1))
            js_res = client.get(js_url, timeout=timeout)
            if js_res.is_success:
                js = js_res.text
                # url: "https://..." pattern (absolute or root-relative)
                url_m = re.search(r'url:\s*["\'](https?://[^"\']+|/[^"\'/][^"\']*)["\']', js)
                if url_m:
                    return urljoin(base_url, url_m.group(1))
                # Variable assignment pattern, e.g. const specUrl = "...json"
                var_m = re.search(
                    r'(?:const|let|var)\s+\w*(?:url|definition|spec|swagger|openapi)\w*\s*=\s*["\'](https?://[^"\']+\.json[^"\']*)["\']',
                    js, re.I,
                )
                if var_m:
                    return urljoin(base_url, var_m.group(1))
        except httpx.HTTPError:
            # Unreachable script: try the next candidate instead of failing.
            continue

    return None
103
+
104
+
105
def _probe(url: str, client: httpx.Client, timeout: float) -> DetectionResult | None:
    """Fetch *url* and check whether it serves an API spec.

    If the response body is a recognizable spec, wrap it in a
    DetectionResult. If it is an HTML documentation page (Swagger UI /
    Redoc), follow the spec URL embedded in the page and probe that
    instead. Returns None on any HTTP failure or unrecognized content.
    """
    try:
        response = client.get(url, timeout=timeout, follow_redirects=True,
                              headers={"Accept": "application/json, application/xml, text/yaml, */*"})
        if not response.is_success:
            return None

        content_type = response.headers.get("content-type", "")
        body = response.text

        detected = _detect_from_content(body, content_type)
        if detected:
            return DetectionResult(type=detected, spec_url=url,
                                   raw_content=body, content_type=content_type)

        # HTML page: locate the spec URL it embeds and recurse on it.
        if "html" in content_type:
            embedded = _extract_spec_url_from_html(body, url, client, timeout)
            if embedded:
                return _probe(embedded, client, timeout)

        return None
    except (httpx.HTTPError, httpx.InvalidURL):
        return None
129
+
130
+
131
def _probe_graphql(base_url: str, client: httpx.Client, timeout: float) -> DetectionResult | None:
    """POST an introspection query to likely GraphQL endpoints.

    Tries *base_url* itself plus the well-known GraphQL paths (duplicates
    skipped, order preserved) and returns a DetectionResult for the first
    endpoint whose response looks like an introspection result.
    """
    candidates = [base_url]
    for path in WELL_KNOWN_PATHS["graphql"]:
        candidates.append(urljoin(base_url + "/", path.lstrip("/")))

    # dict.fromkeys dedupes while preserving insertion order.
    for candidate in dict.fromkeys(candidates):
        try:
            response = client.post(candidate, content=GRAPHQL_PROBE_QUERY,
                                   headers={"Content-Type": "application/json"}, timeout=timeout)
            if response.is_success and "__schema" in response.text:
                return DetectionResult(type="graphql", spec_url=candidate)
        except (httpx.HTTPError, httpx.InvalidURL):
            continue
    return None
147
+
148
+
149
def detect(url: str, *, timeout: float = 10.0, probe_paths: bool = True) -> DetectionResult:
    """Discover the API spec served at *url*.

    Order of attempts: GraphQL heuristic (URL mentions "graphql" or ends
    in "/gql"), direct probe of the URL itself, then — unless
    *probe_paths* is False — every well-known spec path and finally a
    POST-based GraphQL probe.

    Raises:
        ValueError: when no spec could be detected.
    """
    with httpx.Client() as client:
        # URLs that look like GraphQL endpoints get introspected first.
        if "graphql" in url or url.endswith("/gql"):
            found = _probe_graphql(url.rstrip("/"), client, timeout)
            if found:
                return found

        # Direct probe of the URL as given.
        found = _probe(url, client, timeout)
        if found:
            return found

        if probe_paths:
            base = url.rstrip("/")
            for candidate_paths in WELL_KNOWN_PATHS.values():
                for path in candidate_paths:
                    if path.startswith("?"):
                        # Query-string probes (e.g. "?wsdl") attach directly.
                        probe_url = f"{base}{path}"
                    else:
                        probe_url = urljoin(base + "/", path.lstrip("/"))
                    found = _probe(probe_url, client, timeout)
                    if found:
                        return found

            # Last resort: POST-based GraphQL introspection.
            found = _probe_graphql(base, client, timeout)
            if found:
                return found

    raise ValueError(f"Could not detect API spec at {url}. Try providing the direct spec URL.")
@@ -0,0 +1,21 @@
1
+ """API executors."""
2
+
3
+ from __future__ import annotations
4
+
5
+ from api_to_tools.types import Protocol
6
+ from api_to_tools.executors.rest import execute_rest
7
+ from api_to_tools.executors.soap import execute_soap
8
+ from api_to_tools.executors.graphql import execute_graphql
9
+
10
# Protocol -> executor function. Protocols missing here (e.g. grpc,
# asyncapi) make get_executor raise NotImplementedError.
EXECUTORS = {
    "rest": execute_rest,
    "soap": execute_soap,
    "graphql": execute_graphql,
}
15
+
16
+
17
def get_executor(protocol: Protocol):
    """Return the execute function registered for *protocol*.

    Raises:
        NotImplementedError: when no executor exists for the protocol.
    """
    executor = EXECUTORS.get(protocol)
    if executor is None:
        raise NotImplementedError(f"Executor for '{protocol}' is not yet implemented")
    return executor