uipath-langchain 0.1.28__py3-none-any.whl → 0.3.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (60)
  1. uipath_langchain/_cli/_templates/langgraph.json.template +2 -4
  2. uipath_langchain/_cli/cli_new.py +1 -2
  3. uipath_langchain/_utils/_request_mixin.py +8 -0
  4. uipath_langchain/_utils/_settings.py +3 -2
  5. uipath_langchain/agent/guardrails/__init__.py +0 -16
  6. uipath_langchain/agent/guardrails/actions/__init__.py +2 -0
  7. uipath_langchain/agent/guardrails/actions/block_action.py +1 -1
  8. uipath_langchain/agent/guardrails/actions/escalate_action.py +265 -138
  9. uipath_langchain/agent/guardrails/actions/filter_action.py +290 -0
  10. uipath_langchain/agent/guardrails/actions/log_action.py +1 -1
  11. uipath_langchain/agent/guardrails/guardrail_nodes.py +193 -42
  12. uipath_langchain/agent/guardrails/guardrails_factory.py +235 -14
  13. uipath_langchain/agent/guardrails/types.py +0 -12
  14. uipath_langchain/agent/guardrails/utils.py +177 -0
  15. uipath_langchain/agent/react/agent.py +24 -9
  16. uipath_langchain/agent/react/constants.py +1 -2
  17. uipath_langchain/agent/react/file_type_handler.py +123 -0
  18. uipath_langchain/agent/{guardrails → react/guardrails}/guardrails_subgraph.py +119 -25
  19. uipath_langchain/agent/react/init_node.py +16 -1
  20. uipath_langchain/agent/react/job_attachments.py +125 -0
  21. uipath_langchain/agent/react/json_utils.py +183 -0
  22. uipath_langchain/agent/react/jsonschema_pydantic_converter.py +76 -0
  23. uipath_langchain/agent/react/llm_node.py +41 -10
  24. uipath_langchain/agent/react/llm_with_files.py +76 -0
  25. uipath_langchain/agent/react/router.py +48 -37
  26. uipath_langchain/agent/react/types.py +19 -1
  27. uipath_langchain/agent/react/utils.py +30 -4
  28. uipath_langchain/agent/tools/__init__.py +7 -1
  29. uipath_langchain/agent/tools/context_tool.py +151 -1
  30. uipath_langchain/agent/tools/escalation_tool.py +46 -15
  31. uipath_langchain/agent/tools/integration_tool.py +20 -16
  32. uipath_langchain/agent/tools/internal_tools/__init__.py +5 -0
  33. uipath_langchain/agent/tools/internal_tools/analyze_files_tool.py +113 -0
  34. uipath_langchain/agent/tools/internal_tools/internal_tool_factory.py +54 -0
  35. uipath_langchain/agent/tools/mcp_tool.py +86 -0
  36. uipath_langchain/agent/tools/process_tool.py +8 -1
  37. uipath_langchain/agent/tools/static_args.py +18 -40
  38. uipath_langchain/agent/tools/tool_factory.py +13 -5
  39. uipath_langchain/agent/tools/tool_node.py +133 -4
  40. uipath_langchain/agent/tools/utils.py +31 -0
  41. uipath_langchain/agent/wrappers/__init__.py +6 -0
  42. uipath_langchain/agent/wrappers/job_attachment_wrapper.py +62 -0
  43. uipath_langchain/agent/wrappers/static_args_wrapper.py +34 -0
  44. uipath_langchain/chat/__init__.py +4 -0
  45. uipath_langchain/chat/bedrock.py +16 -0
  46. uipath_langchain/chat/mapper.py +60 -42
  47. uipath_langchain/chat/openai.py +56 -26
  48. uipath_langchain/chat/supported_models.py +9 -0
  49. uipath_langchain/chat/vertex.py +62 -46
  50. uipath_langchain/embeddings/embeddings.py +18 -12
  51. uipath_langchain/runtime/factory.py +10 -5
  52. uipath_langchain/runtime/runtime.py +38 -35
  53. uipath_langchain/runtime/schema.py +72 -16
  54. uipath_langchain/runtime/storage.py +178 -71
  55. {uipath_langchain-0.1.28.dist-info → uipath_langchain-0.3.1.dist-info}/METADATA +7 -4
  56. uipath_langchain-0.3.1.dist-info/RECORD +90 -0
  57. uipath_langchain-0.1.28.dist-info/RECORD +0 -76
  58. {uipath_langchain-0.1.28.dist-info → uipath_langchain-0.3.1.dist-info}/WHEEL +0 -0
  59. {uipath_langchain-0.1.28.dist-info → uipath_langchain-0.3.1.dist-info}/entry_points.txt +0 -0
  60. {uipath_langchain-0.1.28.dist-info → uipath_langchain-0.3.1.dist-info}/licenses/LICENSE +0 -0
uipath_langchain/chat/openai.py

@@ -12,21 +12,41 @@ from .supported_models import OpenAIModels
 logger = logging.getLogger(__name__)
 
 
+def _rewrite_openai_url(
+    original_url: str, params: httpx.QueryParams
+) -> httpx.URL | None:
+    """Rewrite OpenAI URLs to UiPath gateway completions endpoint.
+
+    Handles three URL patterns:
+    - responses: false -> .../openai/deployments/.../chat/completions?api-version=...
+    - responses: true -> .../openai/responses?api-version=...
+    - responses API base -> .../{model}?api-version=... (no /openai/ path)
+
+    All are rewritten to .../completions
+    """
+    if "/openai/deployments/" in original_url:
+        base_url = original_url.split("/openai/deployments/")[0]
+    elif "/openai/responses" in original_url:
+        base_url = original_url.split("/openai/responses")[0]
+    else:
+        # Handle base URL case (no /openai/ path appended yet)
+        # Strip query string to get base URL
+        base_url = original_url.split("?")[0]
+
+    new_url_str = f"{base_url}/completions"
+    if params:
+        return httpx.URL(new_url_str, params=params)
+    return httpx.URL(new_url_str)
+
+
 class UiPathURLRewriteTransport(httpx.AsyncHTTPTransport):
     def __init__(self, verify: bool = True, **kwargs):
         super().__init__(verify=verify, **kwargs)
 
     async def handle_async_request(self, request: httpx.Request) -> httpx.Response:
-        original_url = str(request.url)
-
-        if "/openai/deployments/" in original_url:
-            base_url = original_url.split("/openai/deployments/")[0]
-            query_string = request.url.params
-            new_url_str = f"{base_url}/completions"
-            if query_string:
-                request.url = httpx.URL(new_url_str, params=query_string)
-            else:
-                request.url = httpx.URL(new_url_str)
+        new_url = _rewrite_openai_url(str(request.url), request.url.params)
+        if new_url:
+            request.url = new_url
 
         return await super().handle_async_request(request)
 
@@ -36,16 +56,9 @@ class UiPathSyncURLRewriteTransport(httpx.HTTPTransport):
         super().__init__(verify=verify, **kwargs)
 
     def handle_request(self, request: httpx.Request) -> httpx.Response:
-        original_url = str(request.url)
-
-        if "/openai/deployments/" in original_url:
-            base_url = original_url.split("/openai/deployments/")[0]
-            query_string = request.url.params
-            new_url_str = f"{base_url}/completions"
-            if query_string:
-                request.url = httpx.URL(new_url_str, params=query_string)
-            else:
-                request.url = httpx.URL(new_url_str)
+        new_url = _rewrite_openai_url(str(request.url), request.url.params)
+        if new_url:
+            request.url = new_url
 
         return super().handle_request(request)
 
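Both transports now delegate to the shared helper. For reference, a minimal sketch of how it collapses the three URL shapes its docstring lists; the host, deployment name, and query values are placeholders, and the import path assumes the helper lives in uipath_langchain/chat/openai.py, as the file list above suggests:

import httpx

from uipath_langchain.chat.openai import _rewrite_openai_url  # assumed module path

params = httpx.QueryParams({"api-version": "2024-12-01-preview"})

urls = [
    # chat completions deployment URL (responses: false)
    "https://gw.example.local/llm/openai/deployments/gpt-4o/chat/completions",
    # responses endpoint (responses: true)
    "https://gw.example.local/llm/openai/responses",
    # bare responses-API base URL (no /openai/ path appended yet)
    "https://gw.example.local/llm/gpt-4o?api-version=2024-12-01-preview",
]

for url in urls:
    # Each variant gets "/completions" appended to its detected base URL,
    # with the query parameters re-attached.
    print(_rewrite_openai_url(url, params))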
@@ -58,6 +71,9 @@ class UiPathChatOpenAI(AzureChatOpenAI):
         api_version: str = "2024-12-01-preview",
         org_id: Optional[str] = None,
         tenant_id: Optional[str] = None,
+        agenthub_config: Optional[str] = None,
+        extra_headers: Optional[dict[str, str]] = None,
+        byo_connection_id: Optional[str] = None,
         **kwargs,
     ):
         org_id = org_id or os.getenv("UIPATH_ORGANIZATION_ID")
@@ -81,18 +97,24 @@ class UiPathChatOpenAI(AzureChatOpenAI):
         self._vendor = "openai"
         self._model_name = model_name
         self._url: Optional[str] = None
+        self._agenthub_config = agenthub_config
+        self._byo_connection_id = byo_connection_id
+        self._extra_headers = extra_headers or {}
+
+        client_kwargs = get_httpx_client_kwargs()
+        verify = client_kwargs.get("verify", True)
 
         super().__init__(
             azure_endpoint=self._build_base_url(),
             model_name=model_name,
             default_headers=self._build_headers(token),
             http_async_client=httpx.AsyncClient(
-                transport=UiPathURLRewriteTransport(verify=True),
-                **get_httpx_client_kwargs(),
+                transport=UiPathURLRewriteTransport(verify=verify),
+                **client_kwargs,
             ),
             http_client=httpx.Client(
-                transport=UiPathSyncURLRewriteTransport(verify=True),
-                **get_httpx_client_kwargs(),
+                transport=UiPathSyncURLRewriteTransport(verify=verify),
+                **client_kwargs,
             ),
             api_key=token,
             api_version=api_version,
@@ -105,10 +127,18 @@ class UiPathChatOpenAI(AzureChatOpenAI):
             "X-UiPath-LlmGateway-ApiFlavor": "auto",
             "Authorization": f"Bearer {token}",
         }
+
+        if self._agenthub_config:
+            headers["X-UiPath-AgentHub-Config"] = self._agenthub_config
+        if self._byo_connection_id:
+            headers["X-UiPath-LlmGateway-ByoIsConnectionId"] = self._byo_connection_id
         if job_key := os.getenv("UIPATH_JOB_KEY"):
            headers["X-UiPath-JobKey"] = job_key
         if process_key := os.getenv("UIPATH_PROCESS_KEY"):
            headers["X-UiPath-ProcessKey"] = process_key
+
+        # Allow extra_headers to override defaults
+        headers.update(self._extra_headers)
         return headers
 
     @property
@@ -117,9 +147,9 @@ class UiPathChatOpenAI(AzureChatOpenAI):
         formatted_endpoint = vendor_endpoint.format(
             vendor=self._vendor,
             model=self._model_name,
-            api_version=self._openai_api_version,
         )
-        return formatted_endpoint.replace("/completions", "")
+        base_endpoint = formatted_endpoint.replace("/completions", "")
+        return f"{base_endpoint}?api-version={self._openai_api_version}"
 
     def _build_base_url(self) -> str:
         if not self._url:
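An illustrative construction showing where the new keyword arguments end up. Everything except the parameters added in this diff (agenthub_config, extra_headers, byo_connection_id) is assumed, and the UIPATH_* environment variables the class reads must already be set:

from uipath_langchain.chat.openai import UiPathChatOpenAI  # assumed module path
from uipath_langchain.chat.supported_models import OpenAIModels

llm = UiPathChatOpenAI(
    model_name=OpenAIModels.gpt_5_1_2025_11_13,
    agenthub_config="agenthub-config-demo",      # emitted as X-UiPath-AgentHub-Config
    byo_connection_id="connection-demo",         # emitted as X-UiPath-LlmGateway-ByoIsConnectionId
    extra_headers={"X-Custom-Header": "demo"},   # merged last, so it may override the defaults
)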
uipath_langchain/chat/supported_models.py

@@ -21,14 +21,21 @@ class OpenAIModels:
     # GPT-5.1 models
     gpt_5_1_2025_11_13 = "gpt-5.1-2025-11-13"
 
+    # GPT-5.2 models
+    gpt_5_2_2025_12_11 = "gpt-5.2-2025-12-11"
+
 
 
 class GeminiModels:
     """Supported Google Gemini model identifiers."""
 
+    # Gemini 2 models
     gemini_2_5_pro = "gemini-2.5-pro"
     gemini_2_5_flash = "gemini-2.5-flash"
     gemini_2_0_flash_001 = "gemini-2.0-flash-001"
 
+    # Gemini 3 models
+    gemini_3_pro_preview = "gemini-3-pro-preview"
+
 
 
 class BedrockModels:
     """Supported AWS Bedrock model identifiers."""
@@ -38,5 +45,7 @@ class BedrockModels:
 
     # Claude 4 models
     anthropic_claude_sonnet_4 = "anthropic.claude-sonnet-4-20250514-v1:0"
+
+    # Claude 4.5 models
     anthropic_claude_sonnet_4_5 = "anthropic.claude-sonnet-4-5-20250929-v1:0"
     anthropic_claude_haiku_4_5 = "anthropic.claude-haiku-4-5-20251001-v1:0"
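The two identifiers added in 0.3.1 can be referenced by constant rather than raw string; the import path mirrors the supported_models.py entry in the file list above:

from uipath_langchain.chat.supported_models import GeminiModels, OpenAIModels

# Constant-to-string mapping for the newly added models.
print(OpenAIModels.gpt_5_2_2025_12_11)    # gpt-5.2-2025-12-11
print(GeminiModels.gemini_3_pro_preview)  # gemini-3-pro-preview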
uipath_langchain/chat/vertex.py

@@ -40,60 +40,59 @@ from langchain_google_genai import ChatGoogleGenerativeAI
 from pydantic import PrivateAttr
 
 
-def _rewrite_request_for_gateway(
-    request: httpx.Request, gateway_url: str
-) -> httpx.Request:
-    """Rewrite a request to redirect to the UiPath gateway."""
-    url_str = str(request.url)
-    if "generateContent" in url_str or "streamGenerateContent" in url_str:
-        is_streaming = "alt=sse" in url_str
-
-        headers = dict(request.headers)
-
-        headers["X-UiPath-Streaming-Enabled"] = "true" if is_streaming else "false"
-
-        gateway_url_parsed = httpx.URL(gateway_url)
-        if gateway_url_parsed.host:
-            headers["host"] = gateway_url_parsed.host
-
-        return httpx.Request(
-            method=request.method,
-            url=gateway_url,
-            headers=headers,
-            content=request.content,
-            extensions=request.extensions,
-        )
-    return request
+def _rewrite_vertex_url(original_url: str, gateway_url: str) -> httpx.URL | None:
+    """Rewrite Google GenAI URLs to UiPath gateway endpoint.
+
+    Handles URL patterns containing generateContent or streamGenerateContent.
+    Returns the gateway URL, or None if no rewrite needed.
+    """
+    if "generateContent" in original_url or "streamGenerateContent" in original_url:
+        return httpx.URL(gateway_url)
+    return None
 
 
-class _UrlRewriteTransport(httpx.BaseTransport):
+class _UrlRewriteTransport(httpx.HTTPTransport):
     """Transport that rewrites URLs to redirect to UiPath gateway."""
 
-    def __init__(self, gateway_url: str):
+    def __init__(self, gateway_url: str, verify: bool = True):
+        super().__init__(verify=verify)
         self.gateway_url = gateway_url
-        self._transport = httpx.HTTPTransport()
 
     def handle_request(self, request: httpx.Request) -> httpx.Response:
-        request = _rewrite_request_for_gateway(request, self.gateway_url)
-        return self._transport.handle_request(request)
-
-    def close(self) -> None:
-        self._transport.close()
+        original_url = str(request.url)
+        new_url = _rewrite_vertex_url(original_url, self.gateway_url)
+        if new_url:
+            # Set streaming header based on original URL before modifying
+            is_streaming = "alt=sse" in original_url
+            request.headers["X-UiPath-Streaming-Enabled"] = (
+                "true" if is_streaming else "false"
+            )
+            # Update host header to match the new URL
+            request.headers["host"] = new_url.host
+            request.url = new_url
+        return super().handle_request(request)
 
 
-class _AsyncUrlRewriteTransport(httpx.AsyncBaseTransport):
+class _AsyncUrlRewriteTransport(httpx.AsyncHTTPTransport):
     """Async transport that rewrites URLs to redirect to UiPath gateway."""
 
-    def __init__(self, gateway_url: str):
+    def __init__(self, gateway_url: str, verify: bool = True):
+        super().__init__(verify=verify)
         self.gateway_url = gateway_url
-        self._transport = httpx.AsyncHTTPTransport()
 
     async def handle_async_request(self, request: httpx.Request) -> httpx.Response:
-        request = _rewrite_request_for_gateway(request, self.gateway_url)
-        return await self._transport.handle_async_request(request)
-
-    async def aclose(self) -> None:
-        await self._transport.aclose()
+        original_url = str(request.url)
+        new_url = _rewrite_vertex_url(original_url, self.gateway_url)
+        if new_url:
+            # Set streaming header based on original URL before modifying
+            is_streaming = "alt=sse" in original_url
+            request.headers["X-UiPath-Streaming-Enabled"] = (
+                "true" if is_streaming else "false"
+            )
+            # Update host header to match the new URL
+            request.headers["host"] = new_url.host
+            request.url = new_url
+        return await super().handle_async_request(request)
 
 
 class UiPathChatVertex(ChatGoogleGenerativeAI):
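A small sketch of the rewrite decision, assuming the helper is importable from uipath_langchain/chat/vertex.py (per the file list); the Google endpoint and gateway URL are placeholders:

import httpx

from uipath_langchain.chat.vertex import _rewrite_vertex_url  # assumed module path

gateway = "https://gw.example.local/llm/vertex/gemini-2.5-flash"

# generateContent / streamGenerateContent calls are redirected to the gateway...
url = "https://generativelanguage.googleapis.com/v1beta/models/gemini-2.5-flash:generateContent"
assert _rewrite_vertex_url(url, gateway) == httpx.URL(gateway)

# ...and the transports check "alt=sse" on the *original* URL to decide the
# X-UiPath-Streaming-Enabled header before swapping request.url.
stream_url = url.replace(":generateContent", ":streamGenerateContent") + "?alt=sse"
assert _rewrite_vertex_url(stream_url, gateway) == httpx.URL(gateway)

# Anything else passes through untouched.
assert _rewrite_vertex_url("https://generativelanguage.googleapis.com/v1beta/models", gateway) is None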
@@ -103,6 +102,8 @@ class UiPathChatVertex(ChatGoogleGenerativeAI):
     _model_name: str = PrivateAttr()
     _uipath_token: str = PrivateAttr()
     _uipath_llmgw_url: Optional[str] = PrivateAttr(default=None)
+    _agenthub_config: Optional[str] = PrivateAttr(default=None)
+    _byo_connection_id: Optional[str] = PrivateAttr(default=None)
 
     def __init__(
         self,
@@ -111,6 +112,8 @@ class UiPathChatVertex(ChatGoogleGenerativeAI):
         token: Optional[str] = None,
         model_name: str = GeminiModels.gemini_2_5_flash,
         temperature: Optional[float] = None,
+        agenthub_config: Optional[str] = None,
+        byo_connection_id: Optional[str] = None,
         **kwargs: Any,
     ):
         org_id = org_id or os.getenv("UIPATH_ORGANIZATION_ID")
@@ -131,18 +134,21 @@ class UiPathChatVertex(ChatGoogleGenerativeAI):
         )
 
         uipath_url = self._build_base_url(model_name)
-        headers = self._build_headers(token)
+        headers = self._build_headers(token, agenthub_config, byo_connection_id)
+
+        client_kwargs = get_httpx_client_kwargs()
+        verify = client_kwargs.get("verify", True)
 
         http_options = genai_types.HttpOptions(
             httpx_client=httpx.Client(
-                transport=_UrlRewriteTransport(uipath_url),
+                transport=_UrlRewriteTransport(uipath_url, verify=verify),
                 headers=headers,
-                **get_httpx_client_kwargs(),
+                **client_kwargs,
             ),
             httpx_async_client=httpx.AsyncClient(
-                transport=_AsyncUrlRewriteTransport(uipath_url),
+                transport=_AsyncUrlRewriteTransport(uipath_url, verify=verify),
                 headers=headers,
-                **get_httpx_client_kwargs(),
+                **client_kwargs,
             ),
         )
 
@@ -168,6 +174,8 @@ class UiPathChatVertex(ChatGoogleGenerativeAI):
         self._model_name = model_name
         self._uipath_token = token
         self._uipath_llmgw_url = uipath_url
+        self._agenthub_config = agenthub_config
+        self._byo_connection_id = byo_connection_id
 
         if self.temperature is not None and not 0 <= self.temperature <= 2.0:
             raise ValueError("temperature must be in the range [0.0, 2.0]")
@@ -182,11 +190,19 @@ class UiPathChatVertex(ChatGoogleGenerativeAI):
         self.default_metadata = tuple(additional_headers.items())
 
     @staticmethod
-    def _build_headers(token: str) -> dict[str, str]:
+    def _build_headers(
+        token: str,
+        agenthub_config: Optional[str] = None,
+        byo_connection_id: Optional[str] = None,
+    ) -> dict[str, str]:
         """Build HTTP headers for UiPath Gateway requests."""
         headers = {
             "Authorization": f"Bearer {token}",
         }
+        if agenthub_config:
+            headers["X-UiPath-AgentHub-Config"] = agenthub_config
+        if byo_connection_id:
+            headers["X-UiPath-LlmGateway-ByoIsConnectionId"] = byo_connection_id
         if job_key := os.getenv("UIPATH_JOB_KEY"):
             headers["X-UiPath-JobKey"] = job_key
         if process_key := os.getenv("UIPATH_PROCESS_KEY"):
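Since _build_headers is a staticmethod and shown in full above, its behaviour can be exercised directly; the token and ids below are dummies and the import path is assumed from the file list:

from uipath_langchain.chat.vertex import UiPathChatVertex  # assumed module path

headers = UiPathChatVertex._build_headers(
    token="dummy-token",
    agenthub_config="agenthub-config-demo",
    byo_connection_id="connection-demo",
)
# Expect Authorization, X-UiPath-AgentHub-Config and
# X-UiPath-LlmGateway-ByoIsConnectionId, plus X-UiPath-JobKey / X-UiPath-ProcessKey
# when UIPATH_JOB_KEY / UIPATH_PROCESS_KEY are set in the environment.
print(headers)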
uipath_langchain/embeddings/embeddings.py

@@ -4,6 +4,7 @@ from typing import Any
 import httpx
 from langchain_openai.embeddings import AzureOpenAIEmbeddings, OpenAIEmbeddings
 from pydantic import Field
+from uipath._utils._ssl_context import get_httpx_client_kwargs
 from uipath.utils import EndpointManager
 
 from uipath_langchain._utils._request_mixin import UiPathRequestMixin
@@ -26,19 +27,24 @@ class UiPathAzureOpenAIEmbeddings(UiPathRequestMixin, AzureOpenAIEmbeddings):
     )
 
     def __init__(self, **kwargs):
+        default_client_kwargs = get_httpx_client_kwargs()
+        client_kwargs = {
+            **default_client_kwargs,
+            "event_hooks": {
+                "request": [self._log_request_duration],
+                "response": [self._log_response_duration],
+            },
+        }
+        aclient_kwargs = {
+            **default_client_kwargs,
+            "event_hooks": {
+                "request": [self._alog_request_duration],
+                "response": [self._alog_response_duration],
+            },
+        }
         super().__init__(
-            http_client=httpx.Client(
-                event_hooks={
-                    "request": [self._log_request_duration],
-                    "response": [self._log_response_duration],
-                }
-            ),
-            http_async_client=httpx.AsyncClient(
-                event_hooks={
-                    "request": [self._alog_request_duration],
-                    "response": [self._alog_response_duration],
-                }
-            ),
+            http_client=httpx.Client(**client_kwargs),
+            http_async_client=httpx.AsyncClient(**aclient_kwargs),
             **kwargs,
         )
         # Monkey-patch the OpenAI client to use your custom methods
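The refactor merges the shared SSL/proxy kwargs from get_httpx_client_kwargs() with the existing duration-logging event hooks. A self-contained sketch of that pattern, with stand-in hooks (the real ones are UiPathRequestMixin methods not shown in this diff):

import time

import httpx
from uipath._utils._ssl_context import get_httpx_client_kwargs  # same import the diff adds


def log_request_duration(request: httpx.Request) -> None:
    # Stash a start timestamp on the request for the response hook to read.
    request.extensions["start_time"] = time.monotonic()


def log_response_duration(response: httpx.Response) -> None:
    start = response.request.extensions.get("start_time")
    if start is not None:
        print(f"{response.request.url} took {time.monotonic() - start:.3f}s")


client_kwargs = {
    **get_httpx_client_kwargs(),  # shared verify/SSL settings
    "event_hooks": {
        "request": [log_request_duration],
        "response": [log_response_duration],
    },
}
client = httpx.Client(**client_kwargs)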
uipath_langchain/runtime/factory.py

@@ -92,7 +92,7 @@ class UiPathLangGraphRuntimeFactory:
         return self._config
 
     async def _load_graph(
-        self, entrypoint: str
+        self, entrypoint: str, **kwargs
     ) -> StateGraph[Any, Any, Any] | CompiledStateGraph[Any, Any, Any, Any]:
         """
         Load a graph for the given entrypoint.
@@ -181,7 +181,7 @@ class UiPathLangGraphRuntimeFactory:
         return builder.compile(checkpointer=memory)
 
     async def _resolve_and_compile_graph(
-        self, entrypoint: str, memory: AsyncSqliteSaver
+        self, entrypoint: str, memory: AsyncSqliteSaver, **kwargs
     ) -> CompiledStateGraph[Any, Any, Any, Any]:
         """
         Resolve a graph from configuration and compile it.
@@ -201,7 +201,7 @@ class UiPathLangGraphRuntimeFactory:
         if entrypoint in self._graph_cache:
             return self._graph_cache[entrypoint]
 
-        loaded_graph = await self._load_graph(entrypoint)
+        loaded_graph = await self._load_graph(entrypoint, **kwargs)
 
         compiled_graph = await self._compile_graph(loaded_graph, memory)
 
@@ -249,6 +249,7 @@ class UiPathLangGraphRuntimeFactory:
         compiled_graph: CompiledStateGraph[Any, Any, Any, Any],
         runtime_id: str,
         entrypoint: str,
+        **kwargs,
     ) -> UiPathRuntimeProtocol:
         """
         Create a runtime instance from a compiled graph.
@@ -275,10 +276,11 @@ class UiPathLangGraphRuntimeFactory:
             delegate=base_runtime,
             storage=storage,
             trigger_manager=trigger_manager,
+            runtime_id=runtime_id,
         )
 
     async def new_runtime(
-        self, entrypoint: str, runtime_id: str
+        self, entrypoint: str, runtime_id: str, **kwargs
     ) -> UiPathRuntimeProtocol:
         """
         Create a new LangGraph runtime instance.
@@ -293,12 +295,15 @@ class UiPathLangGraphRuntimeFactory:
         # Get shared memory instance
         memory = await self._get_memory()
 
-        compiled_graph = await self._resolve_and_compile_graph(entrypoint, memory)
+        compiled_graph = await self._resolve_and_compile_graph(
+            entrypoint, memory, **kwargs
+        )
 
         return await self._create_runtime_instance(
             compiled_graph=compiled_graph,
             runtime_id=runtime_id,
             entrypoint=entrypoint,
+            **kwargs,
         )
 
     async def dispose(self) -> None:
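A hedged sketch of what the **kwargs plumbing enables: extra keyword arguments given to new_runtime() are now carried through _resolve_and_compile_graph and _load_graph and into _create_runtime_instance. The callbacks argument below is only an example of such a pass-through value; how (or whether) the factory interprets it is not shown in this diff:

from uipath_langchain.runtime.factory import UiPathLangGraphRuntimeFactory  # assumed module path


async def start(factory: UiPathLangGraphRuntimeFactory):
    # "agent" and "run-0001" are placeholder values.
    return await factory.new_runtime(
        entrypoint="agent",
        runtime_id="run-0001",
        callbacks=[],  # forwarded via **kwargs, not interpreted here
    )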
uipath_langchain/runtime/runtime.py

@@ -3,6 +3,7 @@ import os
 from typing import Any, AsyncGenerator
 from uuid import uuid4
 
+from langchain_core.callbacks import BaseCallbackHandler
 from langchain_core.runnables.config import RunnableConfig
 from langgraph.errors import EmptyInputError, GraphRecursionError, InvalidUpdateError
 from langgraph.graph.state import CompiledStateGraph
@@ -41,6 +42,7 @@ class UiPathLangGraphRuntime:
         graph: CompiledStateGraph[Any, Any, Any, Any],
         runtime_id: str | None = None,
         entrypoint: str | None = None,
+        callbacks: list[BaseCallbackHandler] | None = None,
     ):
         """
         Initialize the runtime.
@@ -53,6 +55,7 @@ class UiPathLangGraphRuntime:
         self.graph: CompiledStateGraph[Any, Any, Any, Any] = graph
         self.runtime_id: str = runtime_id or "default"
         self.entrypoint: str | None = entrypoint
+        self.callbacks: list[BaseCallbackHandler] = callbacks or []
         self.chat = UiPathChatMessagesMapper()
         self._middleware_node_names: set[str] = self._detect_middleware_nodes()
 
@@ -135,10 +138,17 @@ class UiPathLangGraphRuntime:
             if chunk_type == "messages":
                 if isinstance(data, tuple):
                     message, _ = data
-                    event = UiPathRuntimeMessageEvent(
-                        payload=self.chat.map_event(message),
-                    )
-                    yield event
+                    try:
+                        events = self.chat.map_event(message)
+                    except Exception as e:
+                        logger.warning(f"Error mapping message event: {e}")
+                        events = None
+                    if events:
+                        for mapped_event in events:
+                            event = UiPathRuntimeMessageEvent(
+                                payload=mapped_event,
+                            )
+                            yield event
 
             # Emit UiPathRuntimeStateEvent for state updates
             elif chunk_type == "updates":
@@ -153,6 +163,8 @@ class UiPathLangGraphRuntime:
 
                 # Emit state update event for each node
                 for node_name, agent_data in data.items():
+                    if node_name in ("__metadata__",):
+                        continue
                     if isinstance(agent_data, dict):
                         state_event = UiPathRuntimeStateEvent(
                             payload=serialize_output(agent_data),
@@ -189,7 +201,7 @@ class UiPathLangGraphRuntime:
         """Build graph execution configuration."""
         graph_config: RunnableConfig = {
             "configurable": {"thread_id": self.runtime_id},
-            "callbacks": [],
+            "callbacks": self.callbacks,
         }
 
         # Add optional config from environment
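Callback handlers passed to the runtime now land directly in the RunnableConfig built by _build_graph_config. A minimal sketch of wiring a handler in, assuming the runtime class is importable from uipath_langchain/runtime/runtime.py:

from langchain_core.callbacks import BaseCallbackHandler
from langgraph.graph.state import CompiledStateGraph

from uipath_langchain.runtime.runtime import UiPathLangGraphRuntime  # assumed module path


class PrintTokensHandler(BaseCallbackHandler):
    """Toy handler; anything supplied here ends up in graph_config["callbacks"]."""

    def on_llm_new_token(self, token: str, **kwargs) -> None:
        print(token, end="", flush=True)


def make_runtime(graph: CompiledStateGraph) -> UiPathLangGraphRuntime:
    return UiPathLangGraphRuntime(
        graph=graph,
        runtime_id="run-0001",  # placeholder; also used as the LangGraph thread_id
        callbacks=[PrintTokensHandler()],
    )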
@@ -283,29 +295,9 @@ class UiPathLangGraphRuntime:
 
     def _is_interrupted(self, state: StateSnapshot) -> bool:
         """Check if execution was interrupted (static or dynamic)."""
-        # Check for static interrupts (interrupt_before/after)
-        if hasattr(state, "next") and state.next:
-            return True
-
-        # Check for dynamic interrupts (interrupt() inside node)
-        if hasattr(state, "tasks"):
-            for task in state.tasks:
-                if hasattr(task, "interrupts") and task.interrupts:
-                    return True
-
-        return False
-
-    def _get_dynamic_interrupt(self, state: StateSnapshot) -> Interrupt | None:
-        """Get the first dynamic interrupt if any."""
-        if not hasattr(state, "tasks"):
-            return None
-
-        for task in state.tasks:
-            if hasattr(task, "interrupts") and task.interrupts:
-                for interrupt in task.interrupts:
-                    if isinstance(interrupt, Interrupt):
-                        return interrupt
-        return None
+        # An execution is considered interrupted if there are any next nodes (static interrupt)
+        # or if there are any dynamic interrupts present
+        return bool(state.next) or bool(state.interrupts)
 
     async def _create_runtime_result(
         self,
@@ -334,13 +326,24 @@ class UiPathLangGraphRuntime:
         graph_state: StateSnapshot,
     ) -> UiPathRuntimeResult:
         """Create result for suspended execution."""
-        # Check if it's a dynamic interrupt
-        dynamic_interrupt = self._get_dynamic_interrupt(graph_state)
-
-        if dynamic_interrupt:
-            # Dynamic interrupt - should create and save resume trigger
+        interrupt_map: dict[str, Any] = {}
+
+        if graph_state.interrupts:
+            for interrupt in graph_state.interrupts:
+                if isinstance(interrupt, Interrupt):
+                    # Find which task this interrupt belongs to
+                    for task in graph_state.tasks:
+                        if task.interrupts and interrupt in task.interrupts:
+                            # Only include if this task is still waiting for interrupt resolution
+                            if task.interrupts and not task.result:
+                                interrupt_map[interrupt.id] = interrupt.value
+                            break
+
+        # If we have dynamic interrupts, return suspended with interrupt map
+        # The output is used to create the resume triggers
+        if interrupt_map:
             return UiPathRuntimeResult(
-                output=dynamic_interrupt.value,
+                output=interrupt_map,
                 status=UiPathRuntimeStatus.SUSPENDED,
             )
         else:
@@ -360,7 +363,7 @@ class UiPathLangGraphRuntime:
         if next_nodes:
             # Breakpoint is BEFORE these nodes (interrupt_before)
             breakpoint_type = "before"
-            breakpoint_node = next_nodes[0]
+            breakpoint_node = ", ".join(next_nodes)
         else:
             # Breakpoint is AFTER the last executed node (interrupt_after)
             # Get the last executed node from tasks
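The simplified suspension logic leans on StateSnapshot.next, StateSnapshot.interrupts and Interrupt.id from recent LangGraph releases. A standalone sketch (not package code) showing the fields it reads after a dynamic interrupt():

from typing import TypedDict

from langgraph.checkpoint.memory import MemorySaver
from langgraph.graph import END, START, StateGraph
from langgraph.types import interrupt


class State(TypedDict):
    answer: str


def ask_human(state: State) -> State:
    # Dynamic interrupt: execution suspends here until resumed with a Command.
    value = interrupt({"question": "Approve this step?"})
    return {"answer": value}


builder = StateGraph(State)
builder.add_node("ask_human", ask_human)
builder.add_edge(START, "ask_human")
builder.add_edge("ask_human", END)
graph = builder.compile(checkpointer=MemorySaver())

config = {"configurable": {"thread_id": "t1"}}
graph.invoke({"answer": ""}, config)

snapshot = graph.get_state(config)
# _is_interrupted: truthy next nodes OR collected interrupts mean "suspended".
print(bool(snapshot.next) or bool(snapshot.interrupts))
# _create_suspend_result builds {interrupt.id: interrupt.value} from these objects.
print({i.id: i.value for i in snapshot.interrupts})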