uipath-openai-agents 0.0.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,7 @@
+ """UiPath OpenAI Agents SDK."""
+
+ from .chat import UiPathChatOpenAI
+ from .middlewares import register_middleware
+
+ __version__ = "0.1.0"
+ __all__ = ["register_middleware", "UiPathChatOpenAI"]
@@ -0,0 +1 @@
+ __all__: list[str] = []
@@ -0,0 +1,21 @@
+ # Agent Code Patterns Reference
+
+ This document provides practical code patterns for building UiPath coded agents using the **OpenAI Agents SDK**.
+
+ ---
+
+ ## Documentation Structure
+
+ This documentation is split into multiple files for efficient context loading. Load only the files you need:
+
+ 1. **@.agent/REQUIRED_STRUCTURE.md** - OpenAI Agent patterns and templates
+    - **When to load:** Creating a new OpenAI agent or understanding required patterns
+    - **Contains:** Agent definition patterns (`Agent`, `Runner`, `@tool`), registration in `openai_agents.json`, input/output format (simple message/messages), UiPath integration setup
+
+ 2. **@.agent/SDK_REFERENCE.md** - OpenAI Agents SDK and UiPath integration reference
+    - **When to load:** Working with OpenAI Agents SDK methods, tools, handoffs, or UiPath services
+    - **Contains:** Complete OpenAI Agents SDK API (Agent, Runner, tool decorator), UiPathChatOpenAI client, supported models, UiPath services integration patterns
+
+ 3. **@.agent/CLI_REFERENCE.md** - CLI commands documentation
+    - **When to load:** Working with `uipath init`, `uipath run agent`, or deployment commands
+    - **Contains:** Command syntax for OpenAI agents, options, input formats (`{"message": "..."}` or `{"messages": [...]}`), debug mode, usage examples
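The two input formats referenced in the CLI documentation above look roughly like this (a minimal sketch; the exact schema of the `messages` form is an assumption based on OpenAI-style role/content messages and is not spelled out in this diff):

```python
# Single-message form, as used by the `uipath run agent` example in the templates below.
single_message_input = {"message": "What is the weather in San Francisco?"}

# Conversation form; the role/content shape is assumed to follow OpenAI-style chat messages.
conversation_input = {
    "messages": [
        {"role": "user", "content": "What is the weather in San Francisco?"}
    ]
}
```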
@@ -0,0 +1,30 @@
+ from agents import Agent, function_tool, set_default_openai_client
+ from openai import AsyncOpenAI
+
+
+ @function_tool
+ def get_weather(location: str) -> str:
+     """Get the current weather for a location.
+
+     Args:
+         location: The city and state, e.g. "San Francisco, CA"
+
+     Returns:
+         Weather information for the location
+     """
+     # This is a mock implementation
+     return f"The weather in {location} is sunny and 72°F"
+
+
+ # Initialize the OpenAI client (the Agents SDK expects an async client,
+ # configured globally rather than passed to the Agent)
+ client = AsyncOpenAI()
+ set_default_openai_client(client)
+
+ # Create an agent with tools
+ agent = Agent(
+     name="weather_agent",
+     instructions="You are a helpful weather assistant. Use the get_weather tool to provide weather information.",
+     model="gpt-4o-mini",
+     tools=[get_weather],
+ )
@@ -0,0 +1,5 @@
+ {
+     "agents": {
+         "agent": "main.py:agent"
+     }
+ }
@@ -0,0 +1,80 @@
+ import os
+ import shutil
+
+ import click
+ from uipath._cli._utils._console import ConsoleLogger
+ from uipath._cli.middlewares import MiddlewareResult
+
+ console = ConsoleLogger()
+
+
+ def generate_script(target_directory):
+     template_script_path = os.path.join(
+         os.path.dirname(__file__), "_templates/main.py.template"
+     )
+     target_path = os.path.join(target_directory, "main.py")
+
+     shutil.copyfile(template_script_path, target_path)
+
+     template_openai_agents_json_path = os.path.join(
+         os.path.dirname(__file__), "_templates/openai_agents.json.template"
+     )
+     target_path = os.path.join(target_directory, "openai_agents.json")
+     shutil.copyfile(template_openai_agents_json_path, target_path)
+
+     # Copy OpenAI-specific AGENTS.md template (overrides generic one)
+     template_agents_md_path = os.path.join(
+         os.path.dirname(__file__), "_templates/AGENTS.md.template"
+     )
+     target_agents_md = os.path.join(target_directory, "AGENTS.md")
+     if os.path.exists(template_agents_md_path):
+         shutil.copyfile(template_agents_md_path, target_agents_md)
+
+
+ def generate_pyproject(target_directory, project_name):
+     project_toml_path = os.path.join(target_directory, "pyproject.toml")
+     toml_content = f"""[project]
+ name = "{project_name}"
+ version = "0.0.1"
+ description = "{project_name}"
+ authors = [{{ name = "John Doe", email = "john.doe@myemail.com" }}]
+ dependencies = [
+     "uipath-openai-agents>=0.1.0",
+     "openai>=1.0.0"
+ ]
+ requires-python = ">=3.11"
+ """
+
+     with open(project_toml_path, "w") as f:
+         f.write(toml_content)
+
+
+ def openai_agents_new_middleware(name: str) -> MiddlewareResult:
+     """Middleware to create a demo OpenAI agent."""
+
+     directory = os.getcwd()
+
+     try:
+         with console.spinner(f"Creating new agent {name} in current directory ..."):
+             generate_script(directory)
+             console.success("Created 'main.py' file.")
+             console.success("Created 'openai_agents.json' file.")
+             console.success("Created 'AGENTS.md' file.")
+             generate_pyproject(directory, name)
+             console.success("Created 'pyproject.toml' file.")
+             console.config(
+                 f""" Please ensure to define {click.style("OPENAI_API_KEY", fg="bright_yellow")} in your .env file. """
+             )
+             init_command = """uipath init"""
+             run_command = """uipath run agent '{"message": "What is the weather in San Francisco?"}'"""
+             console.hint(
+                 f""" Initialize project: {click.style(init_command, fg="cyan")}"""
+             )
+             console.hint(f""" Run agent: {click.style(run_command, fg="cyan")}""")
+         return MiddlewareResult(should_continue=False)
+     except Exception as e:
+         console.error(f"Error creating demo agent: {str(e)}")
+         return MiddlewareResult(
+             should_continue=False,
+             should_include_stacktrace=True,
+         )
@@ -0,0 +1,5 @@
+ """UiPath OpenAI Chat models."""
+
+ from .openai import UiPathChatOpenAI
+
+ __all__ = ["UiPathChatOpenAI"]
@@ -0,0 +1,242 @@
+ """UiPath OpenAI chat client with custom endpoint integration."""
+
+ import os
+ from typing import Optional
+
+ import httpx
+ from openai import AsyncOpenAI, OpenAI
+ from uipath._utils._ssl_context import get_httpx_client_kwargs
+ from uipath.utils import EndpointManager
+
+ from .supported_models import OpenAIModels
+
+
+ def _rewrite_openai_url(
+     original_url: str, params: httpx.QueryParams
+ ) -> httpx.URL | None:
+     """Rewrite OpenAI URLs to UiPath gateway completions endpoint.
+
+     Handles URL patterns from OpenAI SDK and rewrites to /completions.
+     The X-UiPath-LlmGateway-ApiFlavor header determines API behavior.
+
+     Args:
+         original_url: Original URL from OpenAI SDK
+         params: Query parameters to preserve
+
+     Returns:
+         Rewritten URL pointing to UiPath completions endpoint
+     """
+     # Extract base URL before endpoint path
+     if "/responses" in original_url:
+         base_url = original_url.split("/responses")[0]
+     elif "/chat/completions" in original_url:
+         base_url = original_url.split("/chat/completions")[0]
+     elif "/completions" in original_url:
+         base_url = original_url.split("/completions")[0]
+     else:
+         # Handle base URL case - strip query string
+         base_url = original_url.split("?")[0]
+
+     new_url_str = f"{base_url}/completions"
+     if params:
+         return httpx.URL(new_url_str, params=params)
+     return httpx.URL(new_url_str)
+
+
+ class UiPathURLRewriteTransport(httpx.AsyncHTTPTransport):
+     """Custom async transport that rewrites URLs to UiPath endpoints."""
+
+     async def handle_async_request(self, request: httpx.Request) -> httpx.Response:
+         """Handle async request with URL rewriting."""
+         new_url = _rewrite_openai_url(str(request.url), request.url.params)
+         if new_url:
+             request.url = new_url
+
+         return await super().handle_async_request(request)
+
+
+ class UiPathSyncURLRewriteTransport(httpx.HTTPTransport):
+     """Custom sync transport that rewrites URLs to UiPath endpoints."""
+
+     def handle_request(self, request: httpx.Request) -> httpx.Response:
+         """Handle sync request with URL rewriting."""
+         new_url = _rewrite_openai_url(str(request.url), request.url.params)
+         if new_url:
+             request.url = new_url
+
+         return super().handle_request(request)
+
+
+ class UiPathChatOpenAI:
+     """UiPath OpenAI client for chat completions.
+
+     This client wraps the OpenAI SDK and configures it to use UiPath's
+     LLM Gateway endpoints with proper authentication and headers.
+
+     Example:
+         ```python
+         from uipath_openai_agents.chat import UiPathChatOpenAI
+
+         llm = UiPathChatOpenAI(
+             token="your-token",
+             model_name="gpt-4o-2024-11-20"
+         )
+
+         # Synchronous usage
+         response = llm.client.chat.completions.create(
+             messages=[{"role": "user", "content": "Hello!"}],
+             model=llm.model_name
+         )
+
+         # Async usage
+         response = await llm.async_client.chat.completions.create(
+             messages=[{"role": "user", "content": "Hello!"}],
+             model=llm.model_name
+         )
+         ```
+     """
+
+     def __init__(
+         self,
+         token: Optional[str] = None,
+         model_name: str = OpenAIModels.gpt_4o_2024_11_20,
+         api_version: str = "2024-12-01-preview",
+         org_id: Optional[str] = None,
+         tenant_id: Optional[str] = None,
+         agenthub_config: Optional[str] = None,
+         extra_headers: Optional[dict[str, str]] = None,
+         byo_connection_id: Optional[str] = None,
+         api_flavor: str = "responses",
+     ):
+         """Initialize UiPath OpenAI client.
+
+         Args:
+             token: UiPath access token (defaults to UIPATH_ACCESS_TOKEN env var)
+             model_name: Model to use (e.g., "gpt-4o-2024-11-20")
+             api_version: OpenAI API version
+             org_id: UiPath organization ID (defaults to UIPATH_ORGANIZATION_ID env var)
+             tenant_id: UiPath tenant ID (defaults to UIPATH_TENANT_ID env var)
+             agenthub_config: Optional AgentHub configuration
+             extra_headers: Additional headers to include in requests
+             byo_connection_id: Bring-your-own connection ID
+             api_flavor: API flavor to use - "responses" (default, recommended for agents),
+                 "chat-completions" (traditional chat), or "auto" (let UiPath decide)
+         """
+         # Get credentials from env vars if not provided
+         self._org_id = org_id or os.getenv("UIPATH_ORGANIZATION_ID")
+         self._tenant_id = tenant_id or os.getenv("UIPATH_TENANT_ID")
+         self._token = token or os.getenv("UIPATH_ACCESS_TOKEN")
+
+         # Validate required credentials
+         if not self._org_id:
+             raise ValueError(
+                 "UIPATH_ORGANIZATION_ID environment variable or org_id parameter is required"
+             )
+         if not self._tenant_id:
+             raise ValueError(
+                 "UIPATH_TENANT_ID environment variable or tenant_id parameter is required"
+             )
+         if not self._token:
+             raise ValueError(
+                 "UIPATH_ACCESS_TOKEN environment variable or token parameter is required"
+             )
+
+         # Store configuration
+         self._model_name = model_name
+         self._api_version = api_version
+         self._vendor = "openai"
+         self._agenthub_config = agenthub_config
+         self._byo_connection_id = byo_connection_id
+         self._api_flavor = api_flavor
+         self._extra_headers = extra_headers or {}
+
+         # Build base URL and headers
+         base_url = self._build_base_url()
+         headers = self._build_headers()
+
+         # Get SSL configuration
+         client_kwargs = get_httpx_client_kwargs()
+         verify = client_kwargs.get("verify", True)
+
+         # Create sync client
+         self._client = OpenAI(
+             base_url=base_url,
+             api_key=self._token,
+             default_headers=headers,
+             http_client=httpx.Client(
+                 transport=UiPathSyncURLRewriteTransport(verify=verify),
+                 **client_kwargs,
+             ),
+         )
+
+         # Create async client
+         self._async_client = AsyncOpenAI(
+             base_url=base_url,
+             api_key=self._token,
+             default_headers=headers,
+             http_client=httpx.AsyncClient(
+                 transport=UiPathURLRewriteTransport(verify=verify),
+                 **client_kwargs,
+             ),
+         )
+
+     def _build_headers(self) -> dict[str, str]:
+         """Build headers for UiPath LLM Gateway."""
+         headers = {
+             "X-UiPath-LlmGateway-ApiFlavor": self._api_flavor,
+             "Authorization": f"Bearer {self._token}",
+         }
+
+         # Add optional headers
+         if self._agenthub_config:
+             headers["X-UiPath-AgentHub-Config"] = self._agenthub_config
+         if self._byo_connection_id:
+             headers["X-UiPath-LlmGateway-ByoIsConnectionId"] = self._byo_connection_id
+         if job_key := os.getenv("UIPATH_JOB_KEY"):
+             headers["X-UiPath-JobKey"] = job_key
+         if process_key := os.getenv("UIPATH_PROCESS_KEY"):
+             headers["X-UiPath-ProcessKey"] = process_key
+
+         # Allow extra_headers to override defaults
+         headers.update(self._extra_headers)
+         return headers
+
+     @property
+     def endpoint(self) -> str:
+         """Get the UiPath endpoint for this model (without query parameters)."""
+         vendor_endpoint = EndpointManager.get_vendor_endpoint()
+         formatted_endpoint = vendor_endpoint.format(
+             vendor=self._vendor,
+             model=self._model_name,
+         )
+         # Remove /completions suffix - will be added by URL rewriting
+         base_endpoint = formatted_endpoint.replace("/completions", "")
+         return base_endpoint
+
+     def _build_base_url(self) -> str:
+         """Build the base URL for OpenAI client.
+
+         Note: Query parameters like api-version are added by the URL rewriting logic,
+             not in the base URL, to allow the SDK to append paths properly.
+         """
+         env_uipath_url = os.getenv("UIPATH_URL")
+
+         if env_uipath_url:
+             return f"{env_uipath_url.rstrip('/')}/{self.endpoint}"
+         else:
+             raise ValueError("UIPATH_URL environment variable is required")
+
+     @property
+     def client(self) -> OpenAI:
+         """Get the synchronous OpenAI client."""
+         return self._client
+
+     @property
+     def async_client(self) -> AsyncOpenAI:
+         """Get the asynchronous OpenAI client."""
+         return self._async_client
+
+     @property
+     def model_name(self) -> str:
+         """Get the configured model name."""
+         return self._model_name
1
+ """Supported OpenAI model definitions for UiPath LLM Gateway."""
2
+
3
+
4
+ class OpenAIModels:
5
+ """OpenAI model names supported by UiPath LLM Gateway.
6
+
7
+ These are specific model versions required by UiPath.
8
+ Generic names like "gpt-4o" are not supported - use specific versions.
9
+ """
10
+
11
+ # GPT-4o Models (recommended)
12
+ gpt_4o_2024_11_20 = "gpt-4o-2024-11-20"
13
+ gpt_4o_2024_08_06 = "gpt-4o-2024-08-06"
14
+ gpt_4o_2024_05_13 = "gpt-4o-2024-05-13"
15
+ gpt_4o_mini_2024_07_18 = "gpt-4o-mini-2024-07-18"
16
+
17
+ # GPT-4.1 Models
18
+ gpt_4_1_2025_04_14 = "gpt-4.1-2025-04-14"
19
+ gpt_4_1_mini_2025_04_14 = "gpt-4.1-mini-2025-04-14"
20
+ gpt_4_1_nano_2025_04_14 = "gpt-4.1-nano-2025-04-14"
21
+
22
+ # GPT-4 Models
23
+ gpt_4 = "gpt-4"
24
+ gpt_4_32k = "gpt-4-32k"
25
+ gpt_4_turbo_2024_04_09 = "gpt-4-turbo-2024-04-09"
26
+ gpt_4_1106_preview = "gpt-4-1106-Preview"
27
+ gpt_4_vision_preview = "gpt-4-vision-preview"
28
+
29
+ # GPT-3.5 Models
30
+ gpt_35_turbo = "gpt-35-turbo"
31
+ gpt_35_turbo_0125 = "gpt-35-turbo-0125"
32
+ gpt_35_turbo_1106 = "gpt-35-turbo-1106"
33
+ gpt_35_turbo_16k = "gpt-35-turbo-16k"
34
+
35
+ # GPT-5 Models
36
+ gpt_5_2025_08_07 = "gpt-5-2025-08-07"
37
+ gpt_5_chat_2025_08_07 = "gpt-5-chat-2025-08-07"
38
+ gpt_5_mini_2025_08_07 = "gpt-5-mini-2025-08-07"
39
+ gpt_5_nano_2025_08_07 = "gpt-5-nano-2025-08-07"
40
+ gpt_5_1_2025_11_13 = "gpt-5.1-2025-11-13"
41
+ gpt_5_2_2025_12_11 = "gpt-5.2-2025-12-11"
42
+
43
+ # o3 Models
44
+ o3_mini_2025_01_31 = "o3-mini-2025-01-31"
45
+
46
+ # Other Models
47
+ computer_use_preview_2025_03_11 = "computer-use-preview-2025-03-11"
48
+ text_davinci_003 = "text-davinci-003"
49
+
50
+ # Embedding Models
51
+ text_embedding_3_large = "text-embedding-3-large"
52
+ text_embedding_3_large_community_ecs = "text-embedding-3-large-community-ecs"
53
+ text_embedding_ada_002 = "text-embedding-ada-002"
54
+
55
+ # Model aliases - maps generic names to specific versions
56
+ MODEL_ALIASES = {
57
+ # Map gpt-4.1 variants to gpt-4o (most capable available model)
58
+ "gpt-4.1": gpt_4o_2024_11_20,
59
+ "gpt-4.1-mini": gpt_4o_mini_2024_07_18,
60
+ "gpt-4.1-nano": gpt_4o_mini_2024_07_18,
61
+ "gpt-4.1-2025-04-14": gpt_4o_2024_11_20, # Map invalid model to valid one
62
+ "gpt-4.1-mini-2025-04-14": gpt_4o_mini_2024_07_18,
63
+ "gpt-4.1-nano-2025-04-14": gpt_4o_mini_2024_07_18,
64
+ # Generic model mappings
65
+ "gpt-4o": gpt_4o_2024_11_20,
66
+ "gpt-4o-mini": gpt_4o_mini_2024_07_18,
67
+ "gpt-5": gpt_5_2025_08_07,
68
+ "gpt-5-mini": gpt_5_mini_2025_08_07,
69
+ "gpt-5-nano": gpt_5_nano_2025_08_07,
70
+ "gpt-5.1": gpt_5_1_2025_11_13,
71
+ "gpt-5.2": gpt_5_2_2025_12_11,
72
+ "o3-mini": o3_mini_2025_01_31,
73
+ }
74
+
75
+ @classmethod
76
+ def normalize_model_name(cls, model_name: str) -> str:
77
+ """Normalize a model name to UiPath-specific version."""
78
+ return cls.MODEL_ALIASES.get(model_name, model_name)
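A quick illustration of the alias resolution above, a minimal sketch grounded in the `MODEL_ALIASES` table; the module path is inferred from the relative import `from .supported_models import OpenAIModels` earlier in this diff.

```python
# Module path inferred from this diff; adjust if the package layout differs.
from uipath_openai_agents.chat.supported_models import OpenAIModels

# Generic aliases resolve to the pinned versions listed in MODEL_ALIASES.
assert OpenAIModels.normalize_model_name("gpt-4o") == "gpt-4o-2024-11-20"
assert OpenAIModels.normalize_model_name("o3-mini") == "o3-mini-2025-01-31"

# Names without an alias entry pass through unchanged.
assert OpenAIModels.normalize_model_name("gpt-4o-2024-08-06") == "gpt-4o-2024-08-06"
```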
@@ -0,0 +1,8 @@
+ from uipath._cli.middlewares import Middlewares
+
+ from ._cli.cli_new import openai_agents_new_middleware
+
+
+ def register_middleware():
+     """This function will be called by the entry point system when uipath-openai-agents is installed."""
+     Middlewares.register("new", openai_agents_new_middleware)
File without changes
@@ -0,0 +1,40 @@
+ """UiPath OpenAI Agents Runtime."""
+
+ from uipath.runtime import (
+     UiPathRuntimeContext,
+     UiPathRuntimeFactoryProtocol,
+     UiPathRuntimeFactoryRegistry,
+ )
+
+ from uipath_openai_agents.runtime.factory import UiPathOpenAIAgentRuntimeFactory
+ from uipath_openai_agents.runtime.runtime import UiPathOpenAIAgentRuntime
+ from uipath_openai_agents.runtime.schema import (
+     get_agent_schema,
+     get_entrypoints_schema,
+ )
+
+
+ def register_runtime_factory() -> None:
+     """Register the OpenAI Agents factory. Called automatically via entry point."""
+
+     def create_factory(
+         context: UiPathRuntimeContext | None = None,
+     ) -> UiPathRuntimeFactoryProtocol:
+         return UiPathOpenAIAgentRuntimeFactory(
+             context=context if context else UiPathRuntimeContext(),
+         )
+
+     UiPathRuntimeFactoryRegistry.register(
+         "openai-agents", create_factory, "openai_agents.json"
+     )
+
+
+ register_runtime_factory()
+
+ __all__ = [
+     "register_runtime_factory",
+     "get_entrypoints_schema",
+     "get_agent_schema",
+     "UiPathOpenAIAgentRuntimeFactory",
+     "UiPathOpenAIAgentRuntime",
+ ]
@@ -0,0 +1,51 @@
+ import dataclasses
+ from enum import Enum
+ from typing import Any
+
+
+ def serialize_output(output: Any) -> Any:
+     """
+     Recursively serialize an output object.
+
+     Args:
+         output: The object to serialize
+
+     Returns:
+         Any: The serialized output (dict, list, or primitive value)
+     """
+     if output is None:
+         return {}
+
+     # Handle Pydantic models
+     if hasattr(output, "model_dump"):
+         return serialize_output(output.model_dump(by_alias=True))
+     elif hasattr(output, "dict"):
+         return serialize_output(output.dict())
+     elif hasattr(output, "to_dict"):
+         return serialize_output(output.to_dict())
+
+     # Handle dataclasses (but not dataclass types)
+     elif dataclasses.is_dataclass(output) and not isinstance(output, type):
+         return serialize_output(dataclasses.asdict(output))
+
+     # Handle dictionaries
+     elif isinstance(output, dict):
+         return {k: serialize_output(v) for k, v in output.items()}
+
+     # Handle lists
+     elif isinstance(output, list):
+         return [serialize_output(item) for item in output]
+
+     # Handle other iterables (convert to dict first)
+     elif hasattr(output, "__iter__") and not isinstance(output, (str, bytes)):
+         try:
+             return serialize_output(dict(output))
+         except (TypeError, ValueError):
+             return output
+
+     # Handle Enums
+     elif isinstance(output, Enum):
+         return output.value
+
+     # Return primitive types as is
+     return output
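A small usage sketch for `serialize_output`. The import path is an assumption, since this file's location inside the package is not shown in the diff.

```python
from dataclasses import dataclass
from enum import Enum

# Import path assumed for illustration; adjust to wherever the helper lives.
from uipath_openai_agents.runtime.serialization import serialize_output


class Status(Enum):
    DONE = "done"


@dataclass
class RunResult:
    answer: str
    status: Status


# Dataclasses go through dataclasses.asdict(), then each value is serialized
# recursively, so the Enum collapses to its .value:
print(serialize_output(RunResult(answer="42", status=Status.DONE)))
# -> {'answer': '42', 'status': 'done'}
```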