tooluniverse 1.0.3__py3-none-any.whl → 1.0.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of tooluniverse might be problematic.

Files changed (32)
  1. tooluniverse/__init__.py +17 -5
  2. tooluniverse/agentic_tool.py +268 -330
  3. tooluniverse/compose_scripts/output_summarizer.py +21 -15
  4. tooluniverse/data/agentic_tools.json +2 -2
  5. tooluniverse/data/odphp_tools.json +354 -0
  6. tooluniverse/data/output_summarization_tools.json +2 -2
  7. tooluniverse/default_config.py +1 -0
  8. tooluniverse/llm_clients.py +570 -0
  9. tooluniverse/mcp_tool_registry.py +3 -3
  10. tooluniverse/odphp_tool.py +226 -0
  11. tooluniverse/output_hook.py +92 -3
  12. tooluniverse/remote/boltz/boltz_mcp_server.py +2 -2
  13. tooluniverse/remote/uspto_downloader/uspto_downloader_mcp_server.py +2 -2
  14. tooluniverse/smcp.py +204 -112
  15. tooluniverse/smcp_server.py +23 -20
  16. tooluniverse/test/list_azure_openai_models.py +210 -0
  17. tooluniverse/test/test_agentic_tool_azure_models.py +91 -0
  18. tooluniverse/test/test_api_key_validation_min.py +64 -0
  19. tooluniverse/test/test_claude_sdk.py +86 -0
  20. tooluniverse/test/test_global_fallback.py +288 -0
  21. tooluniverse/test/test_hooks_direct.py +219 -0
  22. tooluniverse/test/test_odphp_tool.py +166 -0
  23. tooluniverse/test/test_openrouter_client.py +288 -0
  24. tooluniverse/test/test_stdio_hooks.py +285 -0
  25. tooluniverse/test/test_tool_finder.py +1 -1
  26. {tooluniverse-1.0.3.dist-info → tooluniverse-1.0.5.dist-info}/METADATA +101 -74
  27. {tooluniverse-1.0.3.dist-info → tooluniverse-1.0.5.dist-info}/RECORD +31 -19
  28. tooluniverse-1.0.5.dist-info/licenses/LICENSE +201 -0
  29. tooluniverse-1.0.3.dist-info/licenses/LICENSE +0 -21
  30. {tooluniverse-1.0.3.dist-info → tooluniverse-1.0.5.dist-info}/WHEEL +0 -0
  31. {tooluniverse-1.0.3.dist-info → tooluniverse-1.0.5.dist-info}/entry_points.txt +0 -0
  32. {tooluniverse-1.0.3.dist-info → tooluniverse-1.0.5.dist-info}/top_level.txt +0 -0
tooluniverse/smcp.py CHANGED
@@ -1389,7 +1389,7 @@ class SMCP(FastMCP):
             self.tool_finder_available = True
             self.tool_finder_type = "Tool_Finder_LLM"
             self.logger.info(
-                "✅ Tool_Finder_LLM (cost-optimized) available for advanced search"
+                "✅ Tool_Finder_LLM available for advanced search"
             )
             return

@@ -1879,6 +1879,91 @@ class SMCP(FastMCP):
         except Exception:
             pass

+    def _print_tooluniverse_banner(self):
+        """Print ToolUniverse branding banner after FastMCP banner with dynamic information."""
+        # Get transport info if available
+        transport_display = getattr(self, '_transport_type', 'Unknown')
+        server_url = getattr(self, '_server_url', 'N/A')
+        tools_count = len(self._exposed_tools)
+
+        # Map transport types to display names
+        transport_map = {
+            'stdio': 'STDIO',
+            'streamable-http': 'Streamable-HTTP',
+            'http': 'HTTP',
+            'sse': 'SSE'
+        }
+        transport_name = transport_map.get(transport_display, transport_display)
+
+        # Format lines with proper alignment (matching FastMCP style)
+        # Each line should be exactly 75 characters (emoji takes 2 display widths but counts as 1 in len())
+        transport_line = f"    📦 Transport: {transport_name}"
+        server_line = f"    🔗 Server URL: {server_url}"
+        tools_line = f"    🧰 Loaded Tools: {tools_count}"
+
+        # Pad to exactly 75 characters (emoji counts as 1 in len() but displays as 2)
+        transport_line = transport_line + " " * (75 - len(transport_line))
+        server_line = server_line + " " * (75 - len(server_line))
+        tools_line = tools_line + " " * (75 - len(tools_line))
+
+        banner = f"""
+    ╭────────────────────────────────────────────────────────────────────────────╮
+    │                                                                            │
+    │                        🧬 ToolUniverse SMCP Server 🧬                      │
+    │                                                                            │
+    │             Bridging AI Agents with Scientific Computing Tools            │
+    │                                                                            │
+    │{transport_line}│
+    │{server_line}│
+    │{tools_line}│
+    │                                                                            │
+    │    🌐 Website: https://aiscientist.tools/                                  │
+    │    💻 GitHub: https://github.com/mims-harvard/ToolUniverse                 │
+    │                                                                            │
+    ╰────────────────────────────────────────────────────────────────────────────╯
+    """
+        print(banner)
+
+    def run(self, *args, **kwargs):
+        """
+        Override run method to display ToolUniverse banner after FastMCP banner.
+
+        This method intercepts the parent's run() call to inject our custom banner
+        immediately after FastMCP displays its startup banner.
+        """
+        # Save transport information for banner display
+        transport = kwargs.get('transport', args[0] if args else 'unknown')
+        host = kwargs.get('host', '0.0.0.0')
+        port = kwargs.get('port', 7000)
+
+        self._transport_type = transport
+
+        # Build server URL based on transport
+        if transport == 'streamable-http' or transport == 'http':
+            self._server_url = f"http://{host}:{port}/mcp"
+        elif transport == 'sse':
+            self._server_url = f"http://{host}:{port}"
+        else:
+            self._server_url = "N/A (stdio mode)"
+
+        # Use threading to print our banner shortly after FastMCP's banner
+        import threading
+        import time
+
+        def delayed_banner():
+            """Print ToolUniverse banner with a small delay to appear after FastMCP banner."""
+            time.sleep(1.0)  # Delay to ensure FastMCP banner displays first
+            self._print_tooluniverse_banner()
+
+        # Start banner thread only on first run
+        if not hasattr(self, '_tooluniverse_banner_shown'):
+            self._tooluniverse_banner_shown = True
+            banner_thread = threading.Thread(target=delayed_banner, daemon=True)
+            banner_thread.start()
+
+        # Call parent's run method (blocking call)
+        return super().run(*args, **kwargs)
+
     def run_simple(
         self,
         transport: Literal["stdio", "http", "sse"] = "http",
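
The new run() override above defers the ToolUniverse banner to a background thread so it appears after FastMCP's own startup output and is printed only once. A minimal, self-contained sketch of that pattern (illustrative only, not code from the package):

import threading
import time


class DelayedBannerDemo:
    """Toy stand-in for the SMCP.run() override: print an extra banner shortly after startup."""

    def _print_banner(self) -> None:
        print(">>> ToolUniverse-style banner (appears after the framework's banner) <<<")

    def run(self) -> None:
        def delayed() -> None:
            time.sleep(1.0)  # let the framework's own banner print first
            self._print_banner()

        # Schedule the banner only once, even if run() is called again.
        if not hasattr(self, "_banner_shown"):
            self._banner_shown = True
            threading.Thread(target=delayed, daemon=True).start()

        print("framework startup banner")  # stands in for super().run(*args, **kwargs)
        time.sleep(2.0)  # keep the process alive long enough for the daemon thread


if __name__ == "__main__":
    DelayedBannerDemo().run()

The daemon=True flag is what keeps the helper thread from blocking interpreter shutdown if the server exits before the delay elapses.
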
@@ -2085,120 +2170,127 @@ class SMCP(FastMCP):
         func_params = []
         param_annotations = {}

-        for param_name, param_info in properties.items():
-            param_type = param_info.get("type", "string")
-            param_description = param_info.get(
-                "description", f"{param_name} parameter"
-            )
-            is_required = param_name in required_params
-
-            # Map JSON schema types to Python types and create appropriate Field
-            field_kwargs = {"description": param_description}
-
-            if param_type == "string":
-                python_type = str
-                # For string type, don't add json_schema_extra - let Pydantic handle it
-            elif param_type == "integer":
-                python_type = int
-                # For integer type, don't add json_schema_extra - let Pydantic handle it
-            elif param_type == "number":
-                python_type = float
-                # For number type, don't add json_schema_extra - let Pydantic handle it
-            elif param_type == "boolean":
-                python_type = bool
-                # For boolean type, don't add json_schema_extra - let Pydantic handle it
-            elif param_type == "array":
-                python_type = list
-                # Add array-specific schema information only for complex cases
-                items_info = param_info.get("items", {})
-                if items_info:
-                    # Clean up items definition - remove invalid fields
-                    cleaned_items = items_info.copy()
-
-                    # Remove 'required' field from items (not valid in JSON Schema for array items)
-                    if "required" in cleaned_items:
-                        cleaned_items.pop("required")
-
-                    field_kwargs["json_schema_extra"] = {
-                        "type": "array",
-                        "items": cleaned_items,
-                    }
+        # Process parameters in two phases: required first, then optional
+        # This ensures Python function signature validity (no default args before non-default)
+        for is_required_phase in [True, False]:
+            for param_name, param_info in properties.items():
+                param_type = param_info.get("type", "string")
+                param_description = param_info.get(
+                    "description", f"{param_name} parameter"
+                )
+                is_required = param_name in required_params
+
+                # Skip if not in current phase
+                if is_required != is_required_phase:
+                    continue
+
+                # Map JSON schema types to Python types and create appropriate Field
+                field_kwargs = {"description": param_description}
+
+                if param_type == "string":
+                    python_type = str
+                    # For string type, don't add json_schema_extra - let Pydantic handle it
+                elif param_type == "integer":
+                    python_type = int
+                    # For integer type, don't add json_schema_extra - let Pydantic handle it
+                elif param_type == "number":
+                    python_type = float
+                    # For number type, don't add json_schema_extra - let Pydantic handle it
+                elif param_type == "boolean":
+                    python_type = bool
+                    # For boolean type, don't add json_schema_extra - let Pydantic handle it
+                elif param_type == "array":
+                    python_type = list
+                    # Add array-specific schema information only for complex cases
+                    items_info = param_info.get("items", {})
+                    if items_info:
+                        # Clean up items definition - remove invalid fields
+                        cleaned_items = items_info.copy()
+
+                        # Remove 'required' field from items (not valid in JSON Schema for array items)
+                        if "required" in cleaned_items:
+                            cleaned_items.pop("required")
+
+                        field_kwargs["json_schema_extra"] = {
+                            "type": "array",
+                            "items": cleaned_items,
+                        }
+                    else:
+                        # If no items specified, default to string items
+                        field_kwargs["json_schema_extra"] = {
+                            "type": "array",
+                            "items": {"type": "string"},
+                        }
+                elif param_type == "object":
+                    python_type = dict
+                    # Add object-specific schema information
+                    object_props = param_info.get("properties", {})
+                    if object_props:
+                        # Clean up the nested object properties - fix common schema issues
+                        cleaned_props = {}
+                        nested_required = []
+
+                        for prop_name, prop_info in object_props.items():
+                            cleaned_prop = prop_info.copy()
+
+                            # Fix string "True"/"False" in required field (common ToolUniverse issue)
+                            if "required" in cleaned_prop:
+                                req_value = cleaned_prop.pop("required")
+                                if req_value in ["True", "true", True]:
+                                    nested_required.append(prop_name)
+                                # Remove the individual required field as it should be at object level
+
+                            cleaned_props[prop_name] = cleaned_prop
+
+                        # Create proper JSON schema for nested object
+                        object_schema = {"type": "object", "properties": cleaned_props}
+
+                        # Add required array at object level if there are required fields
+                        if nested_required:
+                            object_schema["required"] = nested_required
+
+                        field_kwargs["json_schema_extra"] = object_schema
                 else:
-                    # If no items specified, default to string items
-                    field_kwargs["json_schema_extra"] = {
-                        "type": "array",
-                        "items": {"type": "string"},
-                    }
-            elif param_type == "object":
-                python_type = dict
-                # Add object-specific schema information
-                object_props = param_info.get("properties", {})
-                if object_props:
-                    # Clean up the nested object properties - fix common schema issues
-                    cleaned_props = {}
-                    nested_required = []
-
-                    for prop_name, prop_info in object_props.items():
-                        cleaned_prop = prop_info.copy()
-
-                        # Fix string "True"/"False" in required field (common ToolUniverse issue)
-                        if "required" in cleaned_prop:
-                            req_value = cleaned_prop.pop("required")
-                            if req_value in ["True", "true", True]:
-                                nested_required.append(prop_name)
-                            # Remove the individual required field as it should be at object level
-
-                        cleaned_props[prop_name] = cleaned_prop
-
-                    # Create proper JSON schema for nested object
-                    object_schema = {"type": "object", "properties": cleaned_props}
-
-                    # Add required array at object level if there are required fields
-                    if nested_required:
-                        object_schema["required"] = nested_required
-
-                    field_kwargs["json_schema_extra"] = object_schema
-            else:
-                # For unknown types, default to string and only add type info if it's truly unknown
-                python_type = str
-                if param_type not in [
-                    "string",
-                    "integer",
-                    "number",
-                    "boolean",
-                    "array",
-                    "object",
-                ]:
-                    field_kwargs["json_schema_extra"] = {"type": param_type}
-
-            # Create Pydantic Field with enhanced schema information
-            pydantic_field = Field(**field_kwargs)
-
-            if is_required:
-                # Required parameter with description and schema info
-                annotated_type = Annotated[python_type, pydantic_field]
-                param_annotations[param_name] = annotated_type
-                func_params.append(
-                    inspect.Parameter(
-                        param_name,
-                        inspect.Parameter.POSITIONAL_OR_KEYWORD,
-                        annotation=annotated_type,
+                    # For unknown types, default to string and only add type info if it's truly unknown
+                    python_type = str
+                    if param_type not in [
+                        "string",
+                        "integer",
+                        "number",
+                        "boolean",
+                        "array",
+                        "object",
+                    ]:
+                        field_kwargs["json_schema_extra"] = {"type": param_type}
+
+                # Create Pydantic Field with enhanced schema information
+                pydantic_field = Field(**field_kwargs)
+
+                if is_required:
+                    # Required parameter with description and schema info
+                    annotated_type = Annotated[python_type, pydantic_field]
+                    param_annotations[param_name] = annotated_type
+                    func_params.append(
+                        inspect.Parameter(
+                            param_name,
+                            inspect.Parameter.POSITIONAL_OR_KEYWORD,
+                            annotation=annotated_type,
+                        )
                     )
-                )
-            else:
-                # Optional parameter with description, schema info and default value
-                annotated_type = Annotated[
-                    Union[python_type, type(None)], pydantic_field
-                ]
-                param_annotations[param_name] = annotated_type
-                func_params.append(
-                    inspect.Parameter(
-                        param_name,
-                        inspect.Parameter.POSITIONAL_OR_KEYWORD,
-                        default=None,
-                        annotation=annotated_type,
+                else:
+                    # Optional parameter with description, schema info and default value
+                    annotated_type = Annotated[
+                        Union[python_type, type(None)], pydantic_field
+                    ]
+                    param_annotations[param_name] = annotated_type
+                    func_params.append(
+                        inspect.Parameter(
+                            param_name,
+                            inspect.Parameter.POSITIONAL_OR_KEYWORD,
+                            default=None,
+                            annotation=annotated_type,
+                        )
                     )
-                )

         # Create the async function with dynamic signature
         if not properties:
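
The two-phase loop introduced above exists because Python rejects a signature in which a required (no-default) parameter follows an optional one. A small, standalone demonstration (not from the package) using the same inspect.Parameter machinery:

import inspect

optional_param = inspect.Parameter(
    "limit", inspect.Parameter.POSITIONAL_OR_KEYWORD, default=None, annotation=int
)
required_param = inspect.Parameter(
    "query", inspect.Parameter.POSITIONAL_OR_KEYWORD, annotation=str
)

# Optional before required: inspect refuses to build the signature.
try:
    inspect.Signature([optional_param, required_param])
except ValueError as err:
    print("rejected:", err)

# Required first, optional second (the order the two-phase loop guarantees): valid.
print("accepted:", inspect.Signature([required_param, optional_param]))
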
tooluniverse/smcp_server.py CHANGED
@@ -111,7 +111,6 @@ Examples:
         auto_expose_tools=True,
         search_enabled=True,
         max_workers=5,
-        stateless_http=True,  # Enable stateless mode for MCPAutoLoaderTool compatibility
         hooks_enabled=hooks_enabled,
         hook_config=hook_config,
         hook_type=args.hook_type,
@@ -146,8 +145,8 @@ Examples:
   # Start with specific categories
   tooluniverse-stdio --categories uniprot ChEMBL opentarget

-  # Disable hooks
-  tooluniverse-stdio --no-hooks
+  # Enable hooks
+  tooluniverse-stdio --hooks

   # Use FileSaveHook instead of SummarizationHook
   tooluniverse-stdio --hook-type FileSaveHook
@@ -254,8 +253,8 @@ Examples:
     # Server configuration (stdio-specific)
     parser.add_argument(
         "--name",
-        default="SMCP ToolUniverse Server",
-        help="Server name (default: SMCP ToolUniverse Server)",
+        default="ToolUniverse SMCP Server",
+        help="Server name (default: ToolUniverse SMCP Server)",
     )
     parser.add_argument(
         "--no-search",
@@ -272,18 +271,17 @@ Examples:
         "--verbose", "-v", action="store_true", help="Enable verbose logging"
     )

-    # Hook configuration options (default enabled for stdio)
+    # Hook configuration options (default disabled for stdio)
     hook_group = parser.add_argument_group("Hook Configuration")
     hook_group.add_argument(
-        "--no-hooks",
+        "--hooks",
         action="store_true",
-        help="Disable output processing hooks (default: enabled for stdio)",
+        help="Enable output processing hooks (default: disabled for stdio)",
     )
     hook_group.add_argument(
         "--hook-type",
         choices=["SummarizationHook", "FileSaveHook"],
-        default="SummarizationHook",
-        help="Hook type to use (default: SummarizationHook)",
+        help="Hook type to use (default: SummarizationHook when hooks are enabled)",
     )
     hook_group.add_argument(
         "--hook-config-file",
@@ -493,16 +491,23 @@ Examples:
             hook_config = json.load(f)
         print(f"🔗 Hook config loaded from: {args.hook_config_file}")

-    # Determine hook settings (default enabled for stdio)
-    hooks_enabled = not args.no_hooks
+    # Determine hook settings (default disabled for stdio)
+    hooks_enabled = (
+        args.hooks or args.hook_type is not None or hook_config is not None
+    )
+
+    # Set default hook type if hooks are enabled but no type specified
+    hook_type = args.hook_type
+    if hooks_enabled and hook_type is None:
+        hook_type = "SummarizationHook"
     if hooks_enabled:
-        if args.hook_type:
-            print(f"🔗 Hooks enabled: {args.hook_type}")
+        if hook_type:
+            print(f"🔗 Hooks enabled: {hook_type}")
         elif hook_config:
             hook_count = len(hook_config.get("hooks", []))
             print(f"🔗 Hooks enabled: {hook_count} custom hooks")
         else:
-            print(f"🔗 Hooks enabled: {args.hook_type} (default)")
+            print("🔗 Hooks enabled: default configuration")
     else:
         print("🔗 Hooks disabled")

@@ -522,10 +527,9 @@ Examples:
         exclude_tool_types=exclude_tool_types,
         search_enabled=not args.no_search,
         max_workers=args.max_workers,
-        stateless_http=True,  # Enable stateless mode for MCPAutoLoaderTool compatibility
         hooks_enabled=hooks_enabled,
         hook_config=hook_config,
-        hook_type=args.hook_type,
+        hook_type=hook_type,
     )

     # Run server with stdio transport (forced)
@@ -679,8 +683,8 @@ Examples:
     )
     parser.add_argument(
         "--name",
-        default="SMCP ToolUniverse Server",
-        help="Server name (default: SMCP ToolUniverse Server)",
+        default="ToolUniverse SMCP Server",
+        help="Server name (default: ToolUniverse SMCP Server)",
     )
     parser.add_argument(
         "--no-search",
@@ -950,7 +954,6 @@ Examples:
         exclude_tool_types=exclude_tool_types,
         search_enabled=not args.no_search,
         max_workers=args.max_workers,
-        stateless_http=True,  # Enable stateless mode for MCPAutoLoaderTool compatibility
         hooks_enabled=hooks_enabled,
         hook_config=hook_config,
         hook_type=args.hook_type,
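
For clarity, the hook-settings change above (the @@ -493 hunk) makes hooks opt-in for stdio: they activate only when --hooks is passed, a --hook-type is given, or a hook config file is supplied, with SummarizationHook as the fallback type. An illustrative sketch of that derivation (standalone, not code from the release):

from argparse import Namespace


def derive_hooks(args, hook_config=None):
    """Mirror of the new opt-in logic for stdio hook settings."""
    hooks_enabled = args.hooks or args.hook_type is not None or hook_config is not None
    hook_type = args.hook_type
    if hooks_enabled and hook_type is None:
        hook_type = "SummarizationHook"
    return hooks_enabled, hook_type


print(derive_hooks(Namespace(hooks=False, hook_type=None)))            # (False, None): off by default
print(derive_hooks(Namespace(hooks=True, hook_type=None)))             # (True, 'SummarizationHook')
print(derive_hooks(Namespace(hooks=False, hook_type="FileSaveHook")))  # (True, 'FileSaveHook')
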
tooluniverse/test/list_azure_openai_models.py ADDED
@@ -0,0 +1,210 @@
+ #!/usr/bin/env python3
+ """
+ List Azure OpenAI deployments (deployed models) for the current resource.
+
+ Environment variables used:
+ - AZURE_OPENAI_ENDPOINT (required) e.g., https://<your-resource>.openai.azure.com
+ - AZURE_OPENAI_API_KEY (required)
+ - AZURE_OPENAI_API_VERSION (optional; default: 2024-12-01-preview)
+
+ This script queries the Azure OpenAI data-plane deployments endpoint:
+     GET {endpoint}/openai/deployments?api-version={api_version}
+ It also tries alternative paths and versions if the first attempt fails.
+ If REST fails, it falls back to listing models via the SDK (client.models.list()).
+
+ CLI options:
+     --rest-only       Only use REST
+     --sdk-only        Only use SDK fallback
+     --raw             Print raw JSON result for REST (when available)
+     --versions v1 v2  Override API versions to try (space-separated)
+ """
+
+ import argparse
+ import json
+ import os
+ import sys
+ from typing import Any, Dict, List, Optional, Tuple
+
+ import requests
+
+
+ DEFAULT_VERSIONS = [
+     # Common recent versions
+     "2024-12-01-preview",
+     "2024-10-21",
+     # Add more if needed
+ ]
+
+
+ def try_rest_once(
+     endpoint: str, api_key: str, api_version: str, path_variant: str
+ ) -> Tuple[Optional[List[Dict[str, Any]]], Optional[Dict[str, Any]], Optional[str]]:
+     url = endpoint.rstrip("/") + f"{path_variant}?api-version={api_version}"
+     headers = {"api-key": api_key, "Content-Type": "application/json"}
+     try:
+         resp = requests.get(url, headers=headers, timeout=15)
+         resp.raise_for_status()
+         data = resp.json()
+         items = data.get("data") or data.get("value") or []
+         deployments: List[Dict[str, Any]] = []
+         for item in items:
+             deployments.append(
+                 {
+                     "id": item.get("id") or item.get("name"),
+                     "name": item.get("name") or item.get("id"),
+                     "model": (
+                         (item.get("model") or {}).get("name")
+                         if isinstance(item.get("model"), dict)
+                         else item.get("model")
+                     ),
+                     "model_format": item.get("model_format"),
+                     "created": item.get("created"),
+                     "status": item.get("status")
+                     or item.get("provisioningState")
+                     or item.get("provisioning_state"),
+                     "properties": item.get("properties"),
+                 }
+             )
+         return deployments, data, None
+     except Exception as e:
+         return None, None, f"{e} (url: {url})"
+
+
+ def list_deployments_via_rest(
+     endpoint: str, api_key: str, versions: List[str]
+ ) -> Tuple[List[Dict[str, Any]], Optional[Dict[str, Any]], List[str]]:
+     errors: List[str] = []
+     raw: Optional[Dict[str, Any]] = None
+     # Try two common path variants
+     path_variants = ["/openai/deployments", "/deployments"]
+     for v in versions:
+         for pv in path_variants:
+             deployments, raw_json, err = try_rest_once(endpoint, api_key, v, pv)
+             if deployments is not None:
+                 raw = raw_json
+                 return deployments, raw, errors
+             if err:
+                 errors.append(f"{v} {pv}: {err}")
+     return [], raw, errors
+
+
+ def list_models_via_sdk(
+     endpoint: str, api_key: str, api_version: str
+ ) -> List[Dict[str, Any]]:
+     try:
+         from openai import AzureOpenAI  # type: ignore
+     except Exception as e:  # pragma: no cover
+         raise RuntimeError("Failed to import openai AzureOpenAI client: %s" % e)
+
+     client = AzureOpenAI(
+         azure_endpoint=endpoint,
+         api_key=api_key,
+         api_version=api_version,
+     )
+     resp = client.models.list()
+     data = getattr(resp, "data", None) or []
+     models: List[Dict[str, Any]] = []
+     for m in data:
+         models.append(
+             {
+                 "id": getattr(m, "id", None) or getattr(m, "root", None),
+                 "owned_by": getattr(m, "owned_by", None),
+                 "created": getattr(m, "created", None),
+             }
+         )
+     return models
+
+
+ def main() -> None:
+     parser = argparse.ArgumentParser(description="List Azure OpenAI deployments/models")
+     parser.add_argument("--rest-only", action="store_true", help="Use REST only")
+     parser.add_argument("--sdk-only", action="store_true", help="Use SDK only")
+     parser.add_argument(
+         "--raw", action="store_true", help="Print raw JSON from REST when available"
+     )
+     parser.add_argument(
+         "--versions", nargs="*", help="API versions to try for REST (override)"
+     )
+     args = parser.parse_args()
+
+     endpoint = os.getenv("AZURE_OPENAI_ENDPOINT")
+     api_key = os.getenv("AZURE_OPENAI_API_KEY")
+     env_version = os.getenv("AZURE_OPENAI_API_VERSION")
+
+     if not endpoint or not api_key:
+         print("ERROR: Missing required environment variables.")
+         print("  - AZURE_OPENAI_ENDPOINT (current: %s)" % (endpoint or "<unset>"))
+         print(
+             "  - AZURE_OPENAI_API_KEY (current: %s)"
+             % ("<set>" if api_key else "<unset>")
+         )
+         sys.exit(1)
+
+     versions = (
+         args.versions
+         if args.versions
+         else ([env_version] if env_version else DEFAULT_VERSIONS)
+     )
+
+     print("Listing Azure OpenAI deployments for resource:")
+     print(f"  - Endpoint : {endpoint}")
+     print(f"  - Versions : {', '.join([v for v in versions if v])}")
+     print()
+
+     deployments: List[Dict[str, Any]] = []
+     rest_errors: List[str] = []
+     raw: Optional[Dict[str, Any]] = None
+
+     if not args.sdk_only:
+         deployments, raw, rest_errors = list_deployments_via_rest(
+             endpoint, api_key, [v for v in versions if v]
+         )
+         if deployments:
+             print(f"Found {len(deployments)} deployment(s) via REST:")
+             for d in deployments:
+                 print("- Deployment:")
+                 print(f"    name  : {d.get('name')}")
+                 print(f"    id    : {d.get('id')}")
+                 print(f"    model : {d.get('model')}")
+                 print(f"    status: {d.get('status')}")
+             if args.raw and raw is not None:
+                 print("\nRaw JSON (REST):")
+                 print(json.dumps(raw, indent=2, ensure_ascii=False))
+             print(
+                 "\nTip: Use the 'name' (deployment name) as model_id in your requests."
+             )
+             return
+         else:
+             print("No deployments found via REST or REST not available.")
+             if rest_errors:
+                 print("\nREST attempt details (for debugging):")
+                 for e in rest_errors[:5]:  # limit output
+                     print("  -", e)
+             print()
+
+     if not args.rest_only:
+         api_version_for_sdk = env_version or DEFAULT_VERSIONS[0]
+         try:
+             models = list_models_via_sdk(endpoint, api_key, api_version_for_sdk)
+             if models:
+                 print(f"Found {len(models)} model(s) via SDK:")
+                 for m in models[:200]:
+                     print("- Model (SDK):")
+                     print(f"    id      : {m.get('id')}")
+                     if m.get("owned_by") is not None:
+                         print(f"    owned_by: {m.get('owned_by')}")
+                     if m.get("created") is not None:
+                         print(f"    created : {m.get('created')}")
+                 print()
+                 print(
+                     "Note: SDK list may show global IDs; real calls require the deployment name."
+                 )
+             else:
+                 print("No models found via SDK either.")
+         except Exception as e:
+             print("ERROR: Unable to list models via SDK: %s" % e)
+             sys.exit(2)
+
+
+ if __name__ == "__main__":
+     main()
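
To make the script's closing tip concrete: the deployment name it reports is what Azure OpenAI expects as the model argument. A hedged usage sketch (not part of the wheel; the deployment name below is a placeholder):

import os

from openai import AzureOpenAI  # same SDK the script falls back to

deployment_name = "my-gpt-4o-deployment"  # hypothetical; use a name reported by the script

client = AzureOpenAI(
    azure_endpoint=os.environ["AZURE_OPENAI_ENDPOINT"],
    api_key=os.environ["AZURE_OPENAI_API_KEY"],
    api_version=os.getenv("AZURE_OPENAI_API_VERSION", "2024-12-01-preview"),
)

# Azure routes the request by deployment name rather than by the global model ID.
response = client.chat.completions.create(
    model=deployment_name,
    messages=[{"role": "user", "content": "Reply with one word: ready?"}],
)
print(response.choices[0].message.content)
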