uipath-openai-agents 0.0.1__py3-none-any.whl → 0.0.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- uipath_openai_agents/__init__.py +24 -3
- uipath_openai_agents/_cli/_templates/AGENTS.md.template +1 -1
- uipath_openai_agents/_cli/_templates/main.py.template +13 -7
- uipath_openai_agents/_cli/cli_new.py +2 -6
- uipath_openai_agents/chat/__init__.py +30 -3
- uipath_openai_agents/chat/supported_models.py +42 -60
- uipath_openai_agents/runtime/__init__.py +34 -8
- uipath_openai_agents/runtime/_serialize.py +4 -4
- uipath_openai_agents/runtime/agent.py +0 -17
- uipath_openai_agents/runtime/factory.py +0 -120
- uipath_openai_agents/runtime/runtime.py +21 -205
- uipath_openai_agents/runtime/schema.py +124 -172
- uipath_openai_agents-0.0.3.dist-info/METADATA +146 -0
- uipath_openai_agents-0.0.3.dist-info/RECORD +23 -0
- uipath_openai_agents/runtime/_sqlite.py +0 -190
- uipath_openai_agents/runtime/_telemetry.py +0 -32
- uipath_openai_agents/runtime/storage.py +0 -357
- uipath_openai_agents-0.0.1.dist-info/METADATA +0 -53
- uipath_openai_agents-0.0.1.dist-info/RECORD +0 -26
- {uipath_openai_agents-0.0.1.dist-info → uipath_openai_agents-0.0.3.dist-info}/WHEEL +0 -0
- {uipath_openai_agents-0.0.1.dist-info → uipath_openai_agents-0.0.3.dist-info}/entry_points.txt +0 -0
uipath_openai_agents/__init__.py
CHANGED
@@ -1,7 +1,28 @@
-"""
+"""
+UiPath OpenAI Agents SDK.
+
+NOTE: This module uses lazy imports via __getattr__ to avoid loading heavy
+dependencies (openai SDK) at import time. This significantly improves CLI
+startup performance.
+
+Do NOT add eager imports like:
+    from .chat import UiPathChatOpenAI  # BAD - loads openai SDK immediately
+
+Instead, all exports are loaded on-demand when first accessed.
+"""
+
+
+def __getattr__(name):
+    if name == "UiPathChatOpenAI":
+        from .chat import UiPathChatOpenAI
+
+        return UiPathChatOpenAI
+    if name == "register_middleware":
+        from .middlewares import register_middleware
+
+        return register_middleware
+    raise AttributeError(f"module {__name__!r} has no attribute {name!r}")
 
-from .chat import UiPathChatOpenAI
-from .middlewares import register_middleware
 
 __version__ = "0.1.0"
 __all__ = ["register_middleware", "UiPathChatOpenAI"]
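Side note on the hunk above: the new `__init__.py` relies on PEP 562, which lets a package define a module-level `__getattr__` that runs only when an exported name is first accessed. A minimal, self-contained sketch of the same mechanism (the module name `lazy_demo`, the `HeavyClient` class, and the use of `json` as a stand-in for a heavy dependency are illustrative assumptions, not part of the package):

# lazy_demo.py - minimal sketch of the PEP 562 lazy-export pattern.
# Importing this module is cheap; the "heavy" dependency is imported only
# when the exported name is first looked up on the module.

def __getattr__(name):
    if name == "HeavyClient":
        import json  # stand-in for a heavy dependency such as the openai SDK

        class HeavyClient:
            """Illustrative placeholder for a class like UiPathChatOpenAI."""

            codec = json

        return HeavyClient
    raise AttributeError(f"module {__name__!r} has no attribute {name!r}")


__all__ = ["HeavyClient"]

With this in place, `import lazy_demo` does no heavy work, while `lazy_demo.HeavyClient` triggers the deferred import on first access; this is the same trade-off the package makes to keep CLI startup fast.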
uipath_openai_agents/_cli/_templates/AGENTS.md.template
CHANGED
@@ -18,4 +18,4 @@ This documentation is split into multiple files for efficient context loading. L
 
 3. **@.agent/CLI_REFERENCE.md** - CLI commands documentation
    - **When to load:** Working with `uipath init`, `uipath run agent`, or deployment commands
-   - **Contains:** Command syntax for OpenAI agents, options, input formats (`{"
+   - **Contains:** Command syntax for OpenAI agents, options, input formats (`{"messages": "..."}` or `{"messages": [...]}`), debug mode, usage examples
uipath_openai_agents/_cli/_templates/main.py.template
CHANGED
@@ -1,7 +1,11 @@
-from agents import Agent
-from
+from agents import Agent, function_tool
+from agents.models import _openai_shared
 
+from uipath_openai_agents.chat import UiPathChatOpenAI
+from uipath_openai_agents.chat.supported_models import OpenAIModels
 
+
+@function_tool
 def get_weather(location: str) -> str:
     """Get the current weather for a location.
 
@@ -12,17 +16,19 @@ def get_weather(location: str) -> str:
         Weather information for the location
     """
     # This is a mock implementation
-    return f"The weather in {location} is sunny and
+    return f"The weather in {location} is sunny and 32 degrees Celsius."
+
 
+MODEL = OpenAIModels.gpt_5_1_2025_11_13
 
-# Initialize the OpenAI client
-
+# Initialize the UiPath OpenAI client
+uipath_openai_client = UiPathChatOpenAI(model_name=MODEL)
+_openai_shared.set_default_openai_client(uipath_openai_client.async_client)
 
 # Create an agent with tools
 agent = Agent(
     name="weather_agent",
     instructions="You are a helpful weather assistant. Use the get_weather tool to provide weather information.",
-    model=
+    model=MODEL,
     tools=[get_weather],
-    client=client,
 )
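For orientation, the updated template wires a UiPath-backed client into the Agents SDK instead of a raw OpenAI client: `UiPathChatOpenAI` exposes an `async_client` that is registered as the SDK-wide default via `_openai_shared.set_default_openai_client`. Outside the `uipath run agent` flow, an agent built this way could be exercised directly with the SDK's `Runner`; a hedged sketch (the `main` wrapper is illustrative and assumes the `agent` object defined by the template above):

# Hedged sketch: drive the templated agent directly with the OpenAI Agents SDK.
# Assumes the `agent` defined above and a configured UiPath default client.
import asyncio

from agents import Runner


async def main() -> None:
    # Runner.run executes one agent turn, including any tool calls such as get_weather.
    result = await Runner.run(agent, "What is the weather in San Francisco?")
    print(result.final_output)


if __name__ == "__main__":
    asyncio.run(main())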
uipath_openai_agents/_cli/cli_new.py
CHANGED
@@ -39,8 +39,7 @@ version = "0.0.1"
 description = "{project_name}"
 authors = [{{ name = "John Doe", email = "john.doe@myemail.com" }}]
 dependencies = [
-    "uipath-openai-agents>=0.1.0",
-    "openai>=1.0.0"
+    "uipath-openai-agents>=0.0.1, <0.1.0",
 ]
 requires-python = ">=3.11"
 """
@@ -63,11 +62,8 @@ def openai_agents_new_middleware(name: str) -> MiddlewareResult:
     console.success("Created 'AGENTS.md' file.")
     generate_pyproject(directory, name)
     console.success("Created 'pyproject.toml' file.")
-    console.config(
-        f""" Please ensure to define {click.style("OPENAI_API_KEY", fg="bright_yellow")} in your .env file. """
-    )
     init_command = """uipath init"""
-    run_command = """uipath run agent '{"
+    run_command = """uipath run agent '{"messages": "What is the weather in San Francisco?"}'"""
     console.hint(
         f""" Initialize project: {click.style(init_command, fg="cyan")}"""
     )
uipath_openai_agents/chat/__init__.py
CHANGED
@@ -1,5 +1,32 @@
-"""
+"""
+UiPath OpenAI Chat module.
 
-
+NOTE: This module uses lazy imports via __getattr__ to avoid loading heavy
+dependencies (openai SDK, httpx) at import time. This significantly
+improves CLI startup performance.
 
-
+Do NOT add eager imports like:
+    from .openai import UiPathChatOpenAI  # BAD - loads openai SDK immediately
+
+Instead, all exports are loaded on-demand when first accessed.
+"""
+
+
+def __getattr__(name):
+    if name == "UiPathChatOpenAI":
+        from .openai import UiPathChatOpenAI
+
+        return UiPathChatOpenAI
+    if name in ("OpenAIModels", "GeminiModels", "BedrockModels"):
+        from . import supported_models
+
+        return getattr(supported_models, name)
+    raise AttributeError(f"module {__name__!r} has no attribute {name!r}")
+
+
+__all__ = [
+    "UiPathChatOpenAI",
+    "OpenAIModels",
+    "GeminiModels",
+    "BedrockModels",
+]
uipath_openai_agents/chat/supported_models.py
CHANGED
@@ -1,78 +1,60 @@
-"""Supported
+"""Supported model definitions for UiPath LLM Gateway.
 
+OpenAI Agents SDK supports other LLM providers (Bedrock, Gemini, etc.)
+through LiteLLM integration, so we provide model definitions for all supported providers.
+"""
 
-
-"""OpenAI model names supported by UiPath LLM Gateway.
+from enum import StrEnum
 
-These are specific model versions required by UiPath.
-Generic names like "gpt-4o" are not supported - use specific versions.
-"""
 
-
-
-
+class OpenAIModels(StrEnum):
+    """Supported OpenAI model identifiers."""
+
+    # GPT-4o models
     gpt_4o_2024_05_13 = "gpt-4o-2024-05-13"
+    gpt_4o_2024_08_06 = "gpt-4o-2024-08-06"
+    gpt_4o_2024_11_20 = "gpt-4o-2024-11-20"
     gpt_4o_mini_2024_07_18 = "gpt-4o-mini-2024-07-18"
 
-    # GPT-4.1
+    # GPT-4.1 models
     gpt_4_1_2025_04_14 = "gpt-4.1-2025-04-14"
     gpt_4_1_mini_2025_04_14 = "gpt-4.1-mini-2025-04-14"
     gpt_4_1_nano_2025_04_14 = "gpt-4.1-nano-2025-04-14"
 
-    # GPT-
-    gpt_4 = "gpt-4"
-    gpt_4_32k = "gpt-4-32k"
-    gpt_4_turbo_2024_04_09 = "gpt-4-turbo-2024-04-09"
-    gpt_4_1106_preview = "gpt-4-1106-Preview"
-    gpt_4_vision_preview = "gpt-4-vision-preview"
-
-    # GPT-3.5 Models
-    gpt_35_turbo = "gpt-35-turbo"
-    gpt_35_turbo_0125 = "gpt-35-turbo-0125"
-    gpt_35_turbo_1106 = "gpt-35-turbo-1106"
-    gpt_35_turbo_16k = "gpt-35-turbo-16k"
-
-    # GPT-5 Models
+    # GPT-5 models
     gpt_5_2025_08_07 = "gpt-5-2025-08-07"
     gpt_5_chat_2025_08_07 = "gpt-5-chat-2025-08-07"
     gpt_5_mini_2025_08_07 = "gpt-5-mini-2025-08-07"
     gpt_5_nano_2025_08_07 = "gpt-5-nano-2025-08-07"
+
+    # GPT-5.1 models
     gpt_5_1_2025_11_13 = "gpt-5.1-2025-11-13"
+
+    # GPT-5.2 models
     gpt_5_2_2025_12_11 = "gpt-5.2-2025-12-11"
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-        "gpt-5-mini": gpt_5_mini_2025_08_07,
-        "gpt-5-nano": gpt_5_nano_2025_08_07,
-        "gpt-5.1": gpt_5_1_2025_11_13,
-        "gpt-5.2": gpt_5_2_2025_12_11,
-        "o3-mini": o3_mini_2025_01_31,
-    }
-
-    @classmethod
-    def normalize_model_name(cls, model_name: str) -> str:
-        """Normalize a model name to UiPath-specific version."""
-        return cls.MODEL_ALIASES.get(model_name, model_name)
+
+class GeminiModels(StrEnum):
+    """Supported Google Gemini model identifiers."""
+
+    # Gemini 2 models
+    gemini_2_5_pro = "gemini-2.5-pro"
+    gemini_2_5_flash = "gemini-2.5-flash"
+    gemini_2_0_flash_001 = "gemini-2.0-flash-001"
+
+    # Gemini 3 models
+    gemini_3_pro_preview = "gemini-3-pro-preview"
+
+
+class BedrockModels(StrEnum):
+    """Supported AWS Bedrock model identifiers."""
+
+    # Claude 3.7 models
+    anthropic_claude_3_7_sonnet = "anthropic.claude-3-7-sonnet-20250219-v1:0"
+
+    # Claude 4 models
+    anthropic_claude_sonnet_4 = "anthropic.claude-sonnet-4-20250514-v1:0"
+
+    # Claude 4.5 models
+    anthropic_claude_sonnet_4_5 = "anthropic.claude-sonnet-4-5-20250929-v1:0"
+    anthropic_claude_haiku_4_5 = "anthropic.claude-haiku-4-5-20251001-v1:0"
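A brief usage note on the rewrite above: the flat constants and `MODEL_ALIASES` normalization were replaced by `StrEnum` classes, so members behave as plain strings wherever a model name is expected (this requires Python 3.11+, which matches the `requires-python = ">=3.11"` constraint in the project template). A small hedged sketch, assuming the package layout shown in this diff:

# Hedged sketch: StrEnum members compare and format as their string values.
from uipath_openai_agents.chat.supported_models import BedrockModels, OpenAIModels

model = OpenAIModels.gpt_5_1_2025_11_13
assert model == "gpt-5.1-2025-11-13"    # equal to the raw model identifier
print(f"Using model: {model}")          # renders as the raw string in f-strings
print(BedrockModels.anthropic_claude_sonnet_4_5.value)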
uipath_openai_agents/runtime/__init__.py
CHANGED
@@ -1,4 +1,10 @@
-"""
+"""
+UiPath OpenAI Agents Runtime.
+
+NOTE: This module uses lazy imports for most exports to avoid loading heavy
+dependencies (openai SDK) at import time. However, the runtime factory
+registration must happen eagerly for CLI discovery.
+"""
 
 from uipath.runtime import (
     UiPathRuntimeContext,
@@ -6,13 +12,6 @@ from uipath.runtime import (
     UiPathRuntimeFactoryRegistry,
 )
 
-from uipath_openai_agents.runtime.factory import UiPathOpenAIAgentRuntimeFactory
-from uipath_openai_agents.runtime.runtime import UiPathOpenAIAgentRuntime
-from uipath_openai_agents.runtime.schema import (
-    get_agent_schema,
-    get_entrypoints_schema,
-)
-
 
 def register_runtime_factory() -> None:
     """Register the OpenAI Agents factory. Called automatically via entry point."""
@@ -20,6 +19,11 @@ def register_runtime_factory() -> None:
     def create_factory(
         context: UiPathRuntimeContext | None = None,
     ) -> UiPathRuntimeFactoryProtocol:
+        # Import lazily when factory is actually created
+        from uipath_openai_agents.runtime.factory import (
+            UiPathOpenAIAgentRuntimeFactory,
+        )
+
        return UiPathOpenAIAgentRuntimeFactory(
            context=context if context else UiPathRuntimeContext(),
        )
@@ -29,8 +33,30 @@
     )
 
 
+# Register factory eagerly (required for CLI discovery)
 register_runtime_factory()
 
+
+def __getattr__(name):
+    if name == "get_entrypoints_schema":
+        from .schema import get_entrypoints_schema
+
+        return get_entrypoints_schema
+    if name == "get_agent_schema":
+        from .schema import get_agent_schema
+
+        return get_agent_schema
+    if name == "UiPathOpenAIAgentRuntimeFactory":
+        from .factory import UiPathOpenAIAgentRuntimeFactory
+
+        return UiPathOpenAIAgentRuntimeFactory
+    if name == "UiPathOpenAIAgentRuntime":
+        from .runtime import UiPathOpenAIAgentRuntime
+
+        return UiPathOpenAIAgentRuntime
+    raise AttributeError(f"module {__name__!r} has no attribute {name!r}")
+
+
 __all__ = [
     "register_runtime_factory",
     "get_entrypoints_schema",
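The structure above splits responsibilities: `register_runtime_factory()` must run eagerly at import so the CLI's registry can discover the plugin, while everything heavy is pushed into the `create_factory` callable and the module `__getattr__`. A generic hedged sketch of that registration shape (the `FactoryRegistry` class and `make_runtime_factory` function are illustrative stand-ins, not the actual `UiPathRuntimeFactoryRegistry` API):

# Hedged sketch: register a cheap callable eagerly, defer the heavy import
# until the registry actually asks for a factory instance.
from typing import Any, Callable


class FactoryRegistry:
    _factories: dict[str, Callable[[], Any]] = {}

    @classmethod
    def register(cls, name: str, make: Callable[[], Any]) -> None:
        cls._factories[name] = make  # import time: store only the callable

    @classmethod
    def create(cls, name: str) -> Any:
        return cls._factories[name]()  # first use: heavy import happens here


def make_runtime_factory() -> Any:
    import json  # stand-in for importing the real runtime factory module

    return {"factory_backend": json.__name__}


FactoryRegistry.register("openai-agents", make_runtime_factory)

if __name__ == "__main__":
    print(FactoryRegistry.create("openai-agents"))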
uipath_openai_agents/runtime/_serialize.py
CHANGED
@@ -16,12 +16,12 @@ def serialize_output(output: Any) -> Any:
     if output is None:
         return {}
 
-    # Handle Pydantic models
-    if hasattr(output, "model_dump"):
+    # Handle Pydantic models (but not Pydantic model classes)
+    if hasattr(output, "model_dump") and not isinstance(output, type):
         return serialize_output(output.model_dump(by_alias=True))
-    elif hasattr(output, "dict"):
+    elif hasattr(output, "dict") and not isinstance(output, type):
         return serialize_output(output.dict())
-    elif hasattr(output, "to_dict"):
+    elif hasattr(output, "to_dict") and not isinstance(output, type):
         return serialize_output(output.to_dict())
 
     # Handle dataclasses (but not dataclass types)
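Context for the `_serialize.py` change above: a Pydantic model class also satisfies `hasattr(obj, "model_dump")`, so without the added `isinstance(output, type)` guard the serializer would try to call the unbound method on the class itself. A hedged sketch of the distinction (the `Payload` model and the trimmed `serialize` helper are illustrative, not the package's full `serialize_output`):

# Hedged sketch: classes pass the hasattr check too, so they must be excluded.
from pydantic import BaseModel


class Payload(BaseModel):
    value: int


print(hasattr(Payload, "model_dump"))           # True for the class itself
print(hasattr(Payload(value=1), "model_dump"))  # True for instances, as intended


def serialize(output):
    # Mirrors the guarded branch added in the diff: only instances are dumped.
    if hasattr(output, "model_dump") and not isinstance(output, type):
        return output.model_dump(by_alias=True)
    return output


print(serialize(Payload(value=1)))  # {'value': 1}
print(serialize(Payload))           # the class is returned unchanged, not called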
uipath_openai_agents/runtime/agent.py
CHANGED
@@ -28,9 +28,6 @@ class OpenAiAgentLoader:
         self.file_path = file_path
         self.variable_name = variable_name
         self._context_manager: Any = None
-        self._loaded_object: Any = (
-            None  # Store original loaded object for type inference
-        )
 
     @classmethod
     def from_path_string(cls, name: str, file_path: str) -> Self:
@@ -102,9 +99,6 @@ class OpenAiAgentLoader:
                 category=UiPathErrorCategory.USER,
             )
 
-        # Store the original loaded object for type inference
-        self._loaded_object = agent_object
-
         agent = await self._resolve_agent(agent_object)
         if not isinstance(agent, Agent):
             raise UiPathOpenAIAgentsRuntimeError(
@@ -179,17 +173,6 @@ class OpenAiAgentLoader:
 
         return agent_instance
 
-    def get_loaded_object(self) -> Any:
-        """
-        Get the original loaded object before agent resolution.
-
-        This is useful for extracting type annotations from wrapper functions.
-
-        Returns:
-            The original loaded object (could be an Agent, function, or callable)
-        """
-        return self._loaded_object
-
     async def cleanup(self) -> None:
         """Clean up resources (e.g., exit async context managers)."""
         if self._context_manager:
uipath_openai_agents/runtime/factory.py
CHANGED
@@ -1,19 +1,16 @@
 """Factory for creating OpenAI Agents runtimes from openai_agents.json configuration."""
 
 import asyncio
-import os
 from typing import Any
 
 from agents import Agent
 from openinference.instrumentation.openai_agents import OpenAIAgentsInstrumentor
-from uipath.core.tracing import UiPathSpanUtils
 from uipath.runtime import (
     UiPathRuntimeContext,
     UiPathRuntimeProtocol,
 )
 from uipath.runtime.errors import UiPathErrorCategory
 
-from uipath_openai_agents.runtime._telemetry import get_current_span_wrapper
 from uipath_openai_agents.runtime.agent import OpenAiAgentLoader
 from uipath_openai_agents.runtime.config import OpenAiAgentsConfig
 from uipath_openai_agents.runtime.errors import (
@@ -21,7 +18,6 @@ from uipath_openai_agents.runtime.errors import (
     UiPathOpenAIAgentsRuntimeError,
 )
 from uipath_openai_agents.runtime.runtime import UiPathOpenAIAgentRuntime
-from uipath_openai_agents.runtime.storage import SqliteAgentStorage
 
 
 class UiPathOpenAIAgentRuntimeFactory:
@@ -44,110 +40,11 @@ class UiPathOpenAIAgentRuntimeFactory:
         self._agent_loaders: dict[str, OpenAiAgentLoader] = {}
         self._agent_lock = asyncio.Lock()
 
-        self._storage: SqliteAgentStorage | None = None
-        self._storage_lock = asyncio.Lock()
-
         self._setup_instrumentation()
 
     def _setup_instrumentation(self) -> None:
         """Setup tracing and instrumentation."""
         OpenAIAgentsInstrumentor().instrument()
-        UiPathSpanUtils.register_current_span_provider(get_current_span_wrapper)
-
-    async def _get_or_create_storage(self) -> SqliteAgentStorage | None:
-        """Get or create the shared storage instance.
-
-        Returns:
-            Shared storage instance, or None if storage is disabled
-        """
-        async with self._storage_lock:
-            if self._storage is None:
-                storage_path = self._get_storage_path()
-                if storage_path:
-                    self._storage = SqliteAgentStorage(storage_path)
-                    await self._storage.setup()
-            return self._storage
-
-    def _remove_file_with_retry(self, path: str, max_attempts: int = 5) -> None:
-        """Remove file with retry logic for Windows file locking.
-
-        OpenAI SDK uses sync sqlite3 which doesn't immediately release file locks
-        on Windows. This retry mechanism gives the OS time to release the lock.
-
-        Args:
-            path: Path to file to remove
-            max_attempts: Maximum number of retry attempts (default: 5)
-
-        Raises:
-            OSError: If file cannot be removed after all retries
-        """
-        import time
-
-        for attempt in range(max_attempts):
-            try:
-                os.remove(path)
-                return  # Success
-            except PermissionError:
-                if attempt == max_attempts - 1:
-                    # Last attempt failed, re-raise
-                    raise
-                # Exponential backoff: 0.1s, 0.2s, 0.4s, 0.8s
-                time.sleep(0.1 * (2**attempt))
-
-    def _get_storage_path(self) -> str | None:
-        """Get the storage path for agent state.
-
-        Returns:
-            Path to SQLite database for storage, or None if storage is disabled
-        """
-        if self.context.state_file_path is not None:
-            return self.context.state_file_path
-
-        if self.context.runtime_dir and self.context.state_file:
-            path = os.path.join(self.context.runtime_dir, self.context.state_file)
-            if (
-                not self.context.resume
-                and self.context.job_id is None
-                and not self.context.keep_state_file
-            ):
-                # If not resuming and no job id, delete the previous state file
-                if os.path.exists(path):
-                    self._remove_file_with_retry(path)
-            os.makedirs(self.context.runtime_dir, exist_ok=True)
-            return path
-
-        default_path = os.path.join("__uipath", "state.db")
-        os.makedirs(os.path.dirname(default_path), exist_ok=True)
-        return default_path
-
-    def _get_storage_path_legacy(self, runtime_id: str) -> str | None:
-        """
-        Get the storage path for agent session state.
-
-        Args:
-            runtime_id: Unique identifier for the runtime instance
-
-        Returns:
-            Path to SQLite database for session storage, or None if storage is disabled
-        """
-        if self.context.runtime_dir and self.context.state_file:
-            # Use state file name pattern but with runtime_id
-            base_name = os.path.splitext(self.context.state_file)[0]
-            file_name = f"{base_name}_{runtime_id}.db"
-            path = os.path.join(self.context.runtime_dir, file_name)
-
-            if not self.context.resume and self.context.job_id is None:
-                # If not resuming and no job id, delete the previous state file
-                if os.path.exists(path):
-                    self._remove_file_with_retry(path)
-
-            os.makedirs(self.context.runtime_dir, exist_ok=True)
-            return path
-
-        # Default storage path
-        default_dir = os.path.join("__uipath", "sessions")
-        os.makedirs(default_dir, exist_ok=True)
-        return os.path.join(default_dir, f"{runtime_id}.db")
 
     def _load_config(self) -> OpenAiAgentsConfig:
         """Load openai_agents.json configuration."""
@@ -299,22 +196,10 @@ class UiPathOpenAIAgentRuntimeFactory:
         Returns:
             Configured runtime instance
         """
-        # Get shared storage instance
-        storage = await self._get_or_create_storage()
-        storage_path = storage.storage_path if storage else None
-
-        # Get the loaded object from the agent loader for schema inference
-        loaded_object = None
-        if entrypoint in self._agent_loaders:
-            loaded_object = self._agent_loaders[entrypoint].get_loaded_object()
-
         return UiPathOpenAIAgentRuntime(
             agent=agent,
             runtime_id=runtime_id,
             entrypoint=entrypoint,
-            storage_path=storage_path,
-            loaded_object=loaded_object,
-            storage=storage,
         )
 
     async def new_runtime(
@@ -346,8 +231,3 @@ class UiPathOpenAIAgentRuntimeFactory:
 
         self._agent_loaders.clear()
         self._agent_cache.clear()
-
-        # Dispose shared storage
-        if self._storage:
-            await self._storage.dispose()
-            self._storage = None