uipath-langchain 0.0.133__py3-none-any.whl → 0.1.24__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between these versions as they appear in the public registry.
- uipath_langchain/_cli/cli_init.py +130 -191
- uipath_langchain/_cli/cli_new.py +2 -3
- uipath_langchain/_resources/AGENTS.md +21 -0
- uipath_langchain/_resources/REQUIRED_STRUCTURE.md +92 -0
- uipath_langchain/_tracing/__init__.py +3 -2
- uipath_langchain/_tracing/_instrument_traceable.py +11 -12
- uipath_langchain/_utils/_request_mixin.py +327 -51
- uipath_langchain/_utils/_settings.py +2 -2
- uipath_langchain/agent/exceptions/__init__.py +6 -0
- uipath_langchain/agent/exceptions/exceptions.py +11 -0
- uipath_langchain/agent/guardrails/__init__.py +21 -0
- uipath_langchain/agent/guardrails/actions/__init__.py +11 -0
- uipath_langchain/agent/guardrails/actions/base_action.py +23 -0
- uipath_langchain/agent/guardrails/actions/block_action.py +41 -0
- uipath_langchain/agent/guardrails/actions/escalate_action.py +274 -0
- uipath_langchain/agent/guardrails/actions/log_action.py +57 -0
- uipath_langchain/agent/guardrails/guardrail_nodes.py +125 -0
- uipath_langchain/agent/guardrails/guardrails_factory.py +70 -0
- uipath_langchain/agent/guardrails/guardrails_subgraph.py +247 -0
- uipath_langchain/agent/guardrails/types.py +20 -0
- uipath_langchain/agent/react/__init__.py +14 -0
- uipath_langchain/agent/react/agent.py +113 -0
- uipath_langchain/agent/react/constants.py +2 -0
- uipath_langchain/agent/react/init_node.py +20 -0
- uipath_langchain/agent/react/llm_node.py +43 -0
- uipath_langchain/agent/react/router.py +97 -0
- uipath_langchain/agent/react/terminate_node.py +82 -0
- uipath_langchain/agent/react/tools/__init__.py +7 -0
- uipath_langchain/agent/react/tools/tools.py +50 -0
- uipath_langchain/agent/react/types.py +39 -0
- uipath_langchain/agent/react/utils.py +49 -0
- uipath_langchain/agent/tools/__init__.py +17 -0
- uipath_langchain/agent/tools/context_tool.py +53 -0
- uipath_langchain/agent/tools/escalation_tool.py +111 -0
- uipath_langchain/agent/tools/integration_tool.py +181 -0
- uipath_langchain/agent/tools/process_tool.py +49 -0
- uipath_langchain/agent/tools/static_args.py +138 -0
- uipath_langchain/agent/tools/structured_tool_with_output_type.py +14 -0
- uipath_langchain/agent/tools/tool_factory.py +45 -0
- uipath_langchain/agent/tools/tool_node.py +22 -0
- uipath_langchain/agent/tools/utils.py +11 -0
- uipath_langchain/chat/__init__.py +4 -0
- uipath_langchain/chat/bedrock.py +187 -0
- uipath_langchain/chat/gemini.py +330 -0
- uipath_langchain/chat/mapper.py +309 -0
- uipath_langchain/chat/models.py +248 -35
- uipath_langchain/chat/openai.py +132 -0
- uipath_langchain/chat/supported_models.py +42 -0
- uipath_langchain/embeddings/embeddings.py +131 -34
- uipath_langchain/middlewares.py +0 -6
- uipath_langchain/retrievers/context_grounding_retriever.py +7 -9
- uipath_langchain/runtime/__init__.py +36 -0
- uipath_langchain/runtime/_serialize.py +46 -0
- uipath_langchain/runtime/config.py +61 -0
- uipath_langchain/runtime/errors.py +43 -0
- uipath_langchain/runtime/factory.py +315 -0
- uipath_langchain/runtime/graph.py +159 -0
- uipath_langchain/runtime/runtime.py +453 -0
- uipath_langchain/runtime/schema.py +349 -0
- uipath_langchain/runtime/storage.py +115 -0
- uipath_langchain/vectorstores/context_grounding_vectorstore.py +90 -110
- {uipath_langchain-0.0.133.dist-info → uipath_langchain-0.1.24.dist-info}/METADATA +42 -22
- uipath_langchain-0.1.24.dist-info/RECORD +76 -0
- {uipath_langchain-0.0.133.dist-info → uipath_langchain-0.1.24.dist-info}/WHEEL +1 -1
- uipath_langchain-0.1.24.dist-info/entry_points.txt +5 -0
- uipath_langchain/_cli/_runtime/_context.py +0 -21
- uipath_langchain/_cli/_runtime/_conversation.py +0 -298
- uipath_langchain/_cli/_runtime/_exception.py +0 -17
- uipath_langchain/_cli/_runtime/_input.py +0 -139
- uipath_langchain/_cli/_runtime/_output.py +0 -234
- uipath_langchain/_cli/_runtime/_runtime.py +0 -379
- uipath_langchain/_cli/_utils/_graph.py +0 -199
- uipath_langchain/_cli/cli_dev.py +0 -44
- uipath_langchain/_cli/cli_eval.py +0 -78
- uipath_langchain/_cli/cli_run.py +0 -82
- uipath_langchain/_tracing/_oteladapter.py +0 -222
- uipath_langchain/_tracing/_utils.py +0 -28
- uipath_langchain/builder/agent_config.py +0 -191
- uipath_langchain/tools/preconfigured.py +0 -191
- uipath_langchain-0.0.133.dist-info/RECORD +0 -41
- uipath_langchain-0.0.133.dist-info/entry_points.txt +0 -2
- /uipath_langchain/{tools/__init__.py → py.typed} +0 -0
- {uipath_langchain-0.0.133.dist-info → uipath_langchain-0.1.24.dist-info}/licenses/LICENSE +0 -0

uipath_langchain/_cli/_runtime/_conversation.py (+0 -298)

```diff
@@ -1,298 +0,0 @@
-import uuid
-from datetime import datetime
-from typing import Any, Dict, List, Optional
-
-from langchain_core.messages import (
-    AIMessage,
-    AIMessageChunk,
-    BaseMessage,
-    HumanMessage,
-    ToolMessage,
-)
-from uipath.agent.conversation import (
-    UiPathConversationContentPartChunkEvent,
-    UiPathConversationContentPartEndEvent,
-    UiPathConversationContentPartEvent,
-    UiPathConversationContentPartStartEvent,
-    UiPathConversationEvent,
-    UiPathConversationExchangeEvent,
-    UiPathConversationMessage,
-    UiPathConversationMessageEndEvent,
-    UiPathConversationMessageEvent,
-    UiPathConversationMessageStartEvent,
-    UiPathConversationToolCallEndEvent,
-    UiPathConversationToolCallEvent,
-    UiPathConversationToolCallStartEvent,
-    UiPathInlineValue,
-)
-
-
-def _new_id() -> str:
-    return str(uuid.uuid4())
-
-
-def _wrap_in_conversation_event(
-    msg_event: UiPathConversationMessageEvent,
-    exchange_id: Optional[str] = None,
-    conversation_id: Optional[str] = None,
-) -> UiPathConversationEvent:
-    """Helper to wrap a message event into a conversation-level event."""
-    return UiPathConversationEvent(
-        conversation_id=conversation_id or _new_id(),
-        exchange=UiPathConversationExchangeEvent(
-            exchange_id=exchange_id or _new_id(),
-            message=msg_event,
-        ),
-    )
-
-
-def _extract_text(content) -> str:
-    """Normalize LangGraph message.content to plain text."""
-    if isinstance(content, str):
-        return content
-    if isinstance(content, list):
-        return "".join(
-            part.get("text", "")
-            for part in content
-            if isinstance(part, dict) and part.get("type") == "text"
-        )
-    return str(content or "")
-
-
-def uipath_to_human_messages(
-    uipath_msg: UiPathConversationMessage,
-) -> List[HumanMessage]:
-    """
-    Converts a UiPathConversationMessage into a list of HumanMessages for LangGraph.
-    Supports multimodal content parts (text, external content) and preserves metadata.
-    """
-    human_messages = []
-
-    # Loop over each content part
-    if uipath_msg.content_parts:
-        for part in uipath_msg.content_parts:
-            data = part.data
-            content = ""
-            metadata: Dict[str, Any] = {
-                "message_id": uipath_msg.message_id,
-                "content_part_id": part.content_part_id,
-                "mime_type": part.mime_type,
-                "created_at": uipath_msg.created_at,
-                "updated_at": uipath_msg.updated_at,
-            }
-
-            if isinstance(data, UiPathInlineValue):
-                content = str(data.inline)
-
-            # Append a HumanMessage for this content part
-            human_messages.append(HumanMessage(content=content, metadata=metadata))
-
-    # Handle the case where there are no content parts
-    else:
-        metadata = {
-            "message_id": uipath_msg.message_id,
-            "role": uipath_msg.role,
-            "created_at": uipath_msg.created_at,
-            "updated_at": uipath_msg.updated_at,
-        }
-        human_messages.append(HumanMessage(content="", metadata=metadata))
-
-    return human_messages
-
-
-def map_message(
-    message: BaseMessage,
-    exchange_id: Optional[str] = None,
-    conversation_id: Optional[str] = None,
-) -> Optional[UiPathConversationEvent]:
-    """Convert LangGraph BaseMessage (chunk or full) into a UiPathConversationEvent."""
-    message_id = getattr(message, "id", None) or _new_id()
-    timestamp = datetime.now().isoformat()
-
-    # --- Streaming AIMessageChunk ---
-    if isinstance(message, AIMessageChunk):
-        msg_event = UiPathConversationMessageEvent(
-            message_id=message.id or _new_id(),
-        )
-
-        if message.content == []:
-            msg_event.start = UiPathConversationMessageStartEvent(
-                role="assistant", timestamp=timestamp
-            )
-            msg_event.content_part = UiPathConversationContentPartEvent(
-                content_part_id=f"chunk-{message.id}-{0}",
-                start=UiPathConversationContentPartStartEvent(mime_type="text/plain"),
-            )
-
-        elif isinstance(message.content, list) and message.content:
-            for chunk in message.content:
-                if not isinstance(chunk, dict):
-                    continue
-                idx = chunk.get("index", 0)
-                ctype = chunk.get("type")
-                id = chunk.get("id", f"chunk-{message.id}-{idx}")
-
-                # Start of a tool call
-                if ctype == "tool_use":
-                    msg_event.tool_call = UiPathConversationToolCallEvent(
-                        tool_call_id=id,
-                        start=UiPathConversationToolCallStartEvent(
-                            tool_name=chunk.get("name") or "",
-                            arguments=UiPathInlineValue(inline=""),
-                            timestamp=timestamp,
-                        ),
-                    )
-
-                # JSON args streaming (content part for tool args)
-                elif ctype == "input_json_delta":
-                    text = chunk.get("partial_json", "")
-                    # first delta: emit content part start + first chunk
-                    if text == "":
-                        msg_event.content_part = UiPathConversationContentPartEvent(
-                            content_part_id=id,
-                            start=UiPathConversationContentPartStartEvent(
-                                mime_type="application/json"
-                            ),
-                        )
-                    else:
-                        msg_event.content_part = UiPathConversationContentPartEvent(
-                            content_part_id=id,
-                            chunk=UiPathConversationContentPartChunkEvent(
-                                data=text,
-                                content_part_sequence=idx,
-                            ),
-                        )
-
-                # Plain text from assistant
-                elif ctype == "text":
-                    text = chunk.get("text", "")
-                    msg_event.content_part = UiPathConversationContentPartEvent(
-                        content_part_id=id,
-                        chunk=UiPathConversationContentPartChunkEvent(
-                            data=text,
-                            content_part_sequence=idx,
-                        ),
-                    )
-
-        stop_reason = message.response_metadata.get("stop_reason")
-        if not message.content and stop_reason in ("tool_use", "end_turn"):
-            msg_event.end = UiPathConversationMessageEndEvent(timestamp=timestamp)
-
-        if (
-            msg_event.start
-            or msg_event.content_part
-            or msg_event.tool_call
-            or msg_event.end
-        ):
-            return _wrap_in_conversation_event(msg_event, exchange_id, conversation_id)
-
-        return None
-
-    text_content = _extract_text(message.content)
-
-    # --- HumanMessage ---
-    if isinstance(message, HumanMessage):
-        return _wrap_in_conversation_event(
-            UiPathConversationMessageEvent(
-                message_id=message_id,
-                start=UiPathConversationMessageStartEvent(
-                    role="user", timestamp=timestamp
-                ),
-                content_part=UiPathConversationContentPartEvent(
-                    content_part_id=f"cp-{message_id}",
-                    start=UiPathConversationContentPartStartEvent(
-                        mime_type="text/plain"
-                    ),
-                    chunk=UiPathConversationContentPartChunkEvent(data=text_content),
-                    end=UiPathConversationContentPartEndEvent(),
-                ),
-                end=UiPathConversationMessageEndEvent(),
-            ),
-            exchange_id,
-            conversation_id,
-        )
-
-    # --- AIMessage ---
-    if isinstance(message, AIMessage):
-        # Extract first tool call if present
-        tool_calls = getattr(message, "tool_calls", []) or []
-        first_tc = tool_calls[0] if tool_calls else None
-
-        return _wrap_in_conversation_event(
-            UiPathConversationMessageEvent(
-                message_id=message_id,
-                start=UiPathConversationMessageStartEvent(
-                    role="assistant", timestamp=timestamp
-                ),
-                content_part=(
-                    UiPathConversationContentPartEvent(
-                        content_part_id=f"cp-{message_id}",
-                        start=UiPathConversationContentPartStartEvent(
-                            mime_type="text/plain"
-                        ),
-                        chunk=UiPathConversationContentPartChunkEvent(
-                            data=text_content
-                        ),
-                        end=UiPathConversationContentPartEndEvent(),
-                    )
-                    if text_content
-                    else None
-                ),
-                tool_call=(
-                    UiPathConversationToolCallEvent(
-                        tool_call_id=first_tc.get("id") or _new_id(),
-                        start=UiPathConversationToolCallStartEvent(
-                            tool_name=first_tc.get("name"),
-                            arguments=UiPathInlineValue(
-                                inline=str(first_tc.get("args", ""))
-                            ),
-                            timestamp=timestamp,
-                        ),
-                    )
-                    if first_tc
-                    else None
-                ),
-                end=UiPathConversationMessageEndEvent(),
-            ),
-            exchange_id,
-            conversation_id,
-        )
-
-    # --- ToolMessage ---
-    if isinstance(message, ToolMessage):
-        return _wrap_in_conversation_event(
-            UiPathConversationMessageEvent(
-                message_id=message_id,
-                tool_call=UiPathConversationToolCallEvent(
-                    tool_call_id=message.tool_call_id,
-                    start=UiPathConversationToolCallStartEvent(
-                        tool_name=message.name or "",
-                        arguments=UiPathInlineValue(inline=""),
-                        timestamp=timestamp,
-                    ),
-                    end=UiPathConversationToolCallEndEvent(
-                        timestamp=timestamp,
-                        result=UiPathInlineValue(inline=message.content),
-                    ),
-                ),
-            ),
-            exchange_id,
-            conversation_id,
-        )
-
-    # --- Fallback ---
-    return _wrap_in_conversation_event(
-        UiPathConversationMessageEvent(
-            message_id=message_id,
-            start=UiPathConversationMessageStartEvent(
-                role="assistant", timestamp=timestamp
-            ),
-            content_part=UiPathConversationContentPartEvent(
-                content_part_id=f"cp-{message_id}",
-                chunk=UiPathConversationContentPartChunkEvent(data=text_content),
-            ),
-            end=UiPathConversationMessageEndEvent(),
-        ),
-        exchange_id,
-        conversation_id,
-    )
```
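For orientation, here is a minimal sketch of how these removed helpers could be driven, based only on the signatures and branches visible in the hunk above; the import path is the 0.0.133 location and no longer exists in 0.1.24.

```python
# Illustrative sketch only; imports target the module removed above (0.0.133 path).
from langchain_core.messages import AIMessage

from uipath_langchain._cli._runtime._conversation import (
    map_message,
    uipath_to_human_messages,
)

# LangGraph -> UiPath: a full assistant message becomes one conversation-level event
# wrapping message start, an optional text content part, the first tool call (if any),
# and message end.
event = map_message(
    AIMessage(content="Invoice approved.", id="msg-1"),
    exchange_id="exchange-1",
    conversation_id="conv-1",
)
assert event is not None and event.exchange.message.message_id == "msg-1"

# UiPath -> LangGraph: uipath_to_human_messages(uipath_msg) returns one HumanMessage
# per content part (inline values become the message text), or a single empty
# HumanMessage carrying only metadata when the message has no content parts.
```

Streaming `AIMessageChunk` inputs are handled separately: only chunks that produce a start, content part, tool call, or end yield an event, otherwise `map_message` returns `None`.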
uipath_langchain/_cli/_runtime/_exception.py (+0 -17)

```diff
@@ -1,17 +0,0 @@
-from typing import Optional
-
-from uipath._cli._runtime._contracts import UiPathErrorCategory, UiPathRuntimeError
-
-
-class LangGraphRuntimeError(UiPathRuntimeError):
-    """Custom exception for LangGraph runtime errors with structured error information."""
-
-    def __init__(
-        self,
-        code: str,
-        title: str,
-        detail: str,
-        category: UiPathErrorCategory = UiPathErrorCategory.UNKNOWN,
-        status: Optional[int] = None,
-    ):
-        super().__init__(code, title, detail, category, status, prefix="LANGGRAPH")
```
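The removed exception adds nothing beyond a "LANGGRAPH" prefix on top of `UiPathRuntimeError`. A short sketch of how the runtime modules in this diff raised it, using an error code taken from the removed `_input.py` below:

```python
# Sketch: how the removed runtime modules wrapped low-level failures.
from uipath._cli._runtime._contracts import UiPathErrorCategory

from uipath_langchain._cli._runtime._exception import LangGraphRuntimeError  # removed path

def wrap_db_failure(exc: Exception) -> LangGraphRuntimeError:
    return LangGraphRuntimeError(
        "DB_QUERY_FAILED",                                      # code
        "Database query failed",                                # title
        f"Error querying resume trigger information: {exc}",    # detail
        UiPathErrorCategory.SYSTEM,                             # category
    )
```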
uipath_langchain/_cli/_runtime/_input.py (+0 -139)

```diff
@@ -1,139 +0,0 @@
-import logging
-from typing import Any, Optional, cast
-
-from langgraph.types import Command
-from uipath._cli._runtime._contracts import (
-    UiPathApiTrigger,
-    UiPathErrorCategory,
-    UiPathResumeTrigger,
-    UiPathResumeTriggerType,
-)
-from uipath._cli._runtime._hitl import HitlReader
-
-from ._context import LangGraphRuntimeContext
-from ._conversation import uipath_to_human_messages
-from ._exception import LangGraphRuntimeError
-
-logger = logging.getLogger(__name__)
-
-
-class LangGraphInputProcessor:
-    """
-    Handles input processing for graph execution, including resume scenarios
-    where it needs to fetch data from UiPath.
-    """
-
-    def __init__(self, context: LangGraphRuntimeContext):
-        """
-        Initialize the LangGraphInputProcessor.
-
-        Args:
-            context: The runtime context for the graph execution.
-        """
-        self.context = context
-
-    async def process(self) -> Any:
-        """
-        Process the input data for graph execution, handling both fresh starts and resume scenarios.
-
-        This method determines whether the graph is being executed fresh or resumed from a previous state.
-        For fresh executions, it returns the input JSON directly. For resume scenarios, it fetches
-        the latest trigger information from the database and constructs a Command object with the
-        appropriate resume data.
-
-        The method handles different types of resume triggers:
-        - API triggers: Creates an UiPathApiTrigger with inbox_id and request payload
-        - Other triggers: Uses the HitlReader to process the resume data
-
-        Returns:
-            Any: For fresh executions, returns the input JSON data directly.
-                For resume scenarios, returns a Command object containing the resume data
-                processed through the appropriate trigger handler.
-
-        Raises:
-            LangGraphRuntimeError: If there's an error fetching trigger data from the database
-                during resume processing.
-        """
-        logger.debug(f"Resumed: {self.context.resume} Input: {self.context.input_json}")
-
-        if not self.context.resume:
-            if self.context.input_message:
-                return {
-                    "messages": uipath_to_human_messages(self.context.input_message)
-                }
-            return self.context.input_json
-
-        if self.context.input_json:
-            return Command(resume=self.context.input_json)
-
-        trigger = await self._get_latest_trigger()
-        if not trigger:
-            return Command(resume=self.context.input_json)
-
-        trigger_type, key, folder_path, folder_key, payload = trigger
-        resume_trigger = UiPathResumeTrigger(
-            trigger_type=trigger_type,
-            item_key=key,
-            folder_path=folder_path,
-            folder_key=folder_key,
-            payload=payload,
-        )
-        logger.debug(f"ResumeTrigger: {trigger_type} {key}")
-
-        # populate back expected fields for api_triggers
-        if resume_trigger.trigger_type == UiPathResumeTriggerType.API:
-            resume_trigger.api_resume = UiPathApiTrigger(
-                inbox_id=resume_trigger.item_key, request=resume_trigger.payload
-            )
-        return Command(resume=await HitlReader.read(resume_trigger))
-
-    async def _get_latest_trigger(self) -> Optional[tuple[str, str, str, str, str]]:
-        """
-        Fetch the most recent resume trigger from the database.
-
-        This private method queries the resume triggers table to retrieve the latest trigger
-        information based on timestamp. It handles database connection setup and executes
-        a SQL query to fetch trigger data needed for resume operations.
-
-        The method returns trigger information as a tuple containing:
-        - type: The type of trigger (e.g., 'API', 'MANUAL', etc.)
-        - key: The unique identifier for the trigger/item
-        - folder_path: The path to the folder containing the trigger
-        - folder_key: The unique identifier for the folder
-        - payload: The serialized payload data associated with the trigger
-
-        Returns:
-            Optional[tuple[str, str, str, str, str]]: A tuple containing (type, key, folder_path,
-                folder_key, payload) for the most recent trigger, or None if no triggers are found
-                or if the memory context is not available.
-
-        Raises:
-            LangGraphRuntimeError: If there's an error during database connection setup, query
-                execution, or result fetching. The original exception is wrapped with context
-                about the database operation failure.
-        """
-        if self.context.memory is None:
-            return None
-        try:
-            await self.context.memory.setup()
-            async with (
-                self.context.memory.lock,
-                self.context.memory.conn.cursor() as cur,
-            ):
-                await cur.execute(f"""
-                    SELECT type, key, folder_path, folder_key, payload
-                    FROM {self.context.resume_triggers_table}
-                    ORDER BY timestamp DESC
-                    LIMIT 1
-                """)
-                result = await cur.fetchone()
-                if result is None:
-                    return None
-                return cast(tuple[str, str, str, str, str], tuple(result))
-        except Exception as e:
-            raise LangGraphRuntimeError(
-                "DB_QUERY_FAILED",
-                "Database query failed",
-                f"Error querying resume trigger information: {str(e)}",
-                UiPathErrorCategory.SYSTEM,
-            ) from e
```
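Putting the two branches together, a hedged sketch of the caller's side; the `context` attributes are the ones referenced above, and how the removed `_runtime.py` actually wired this up is not shown in this hunk.

```python
# Sketch of how the removed input processor was consumed, assuming a populated
# LangGraphRuntimeContext (resume, input_json, input_message, memory).
from langgraph.types import Command

from uipath_langchain._cli._runtime._input import LangGraphInputProcessor  # removed path

async def resolve_graph_input(context):
    result = await LangGraphInputProcessor(context).process()
    if isinstance(result, Command):
        # Resume: feed Command(resume=...) back into the interrupted graph.
        return result
    # Fresh start: either {"messages": [...]} built from the UiPath message,
    # or the raw input JSON, passed to the graph invocation as-is.
    return result
```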
uipath_langchain/_cli/_runtime/_output.py (+0 -234)

```diff
@@ -1,234 +0,0 @@
-import json
-import logging
-from functools import cached_property
-from typing import Any, Dict, Optional, cast
-
-from langgraph.types import Interrupt, StateSnapshot
-from uipath._cli._runtime._contracts import (
-    UiPathErrorCategory,
-    UiPathResumeTrigger,
-    UiPathRuntimeResult,
-    UiPathRuntimeStatus,
-)
-from uipath._cli._runtime._hitl import HitlProcessor
-
-from ._context import LangGraphRuntimeContext
-from ._exception import LangGraphRuntimeError
-
-logger = logging.getLogger(__name__)
-
-
-class LangGraphOutputProcessor:
-    """
-    Contains and manages the complete output information from graph execution.
-    Handles serialization, interrupt data, and file output.
-    """
-
-    def __init__(self, context: LangGraphRuntimeContext) -> None:
-        """
-        Initialize the LangGraphOutputProcessor.
-
-        Args:
-            context: The runtime context for the graph execution.
-        """
-        self.context = context
-        self._hitl_processor: Optional[HitlProcessor] = None
-        self._resume_trigger: Optional[UiPathResumeTrigger] = None
-
-    @classmethod
-    async def create(
-        cls, context: LangGraphRuntimeContext
-    ) -> "LangGraphOutputProcessor":
-        """
-        Create and initialize a new LangGraphOutputProcessor instance asynchronously.
-
-        Args:
-            context: The runtime context for the graph execution.
-
-        Returns:
-            LangGraphOutputProcessor: A new initialized instance.
-        """
-        instance = cls(context)
-
-        # Process interrupt information during initialization
-        state = cast(StateSnapshot, context.state)
-        if not state or not hasattr(state, "next") or not state.next:
-            return instance
-
-        for task in state.tasks:
-            if hasattr(task, "interrupts") and task.interrupts:
-                for interrupt in task.interrupts:
-                    if isinstance(interrupt, Interrupt):
-                        instance._hitl_processor = HitlProcessor(interrupt.value)
-                        return instance
-
-        return instance
-
-    @property
-    def status(self) -> UiPathRuntimeStatus:
-        """Determines the execution status based on state."""
-        return (
-            UiPathRuntimeStatus.SUSPENDED
-            if self._hitl_processor
-            else UiPathRuntimeStatus.SUCCESSFUL
-        )
-
-    @cached_property
-    def serialized_output(self) -> Dict[str, Any]:
-        """Serializes the graph execution result."""
-        try:
-            if self.context.output is None:
-                return {}
-
-            return self._serialize_object(self.context.output)
-
-        except Exception as e:
-            raise LangGraphRuntimeError(
-                "OUTPUT_SERIALIZATION_FAILED",
-                "Failed to serialize graph output",
-                f"Error serializing output data: {str(e)}",
-                UiPathErrorCategory.SYSTEM,
-            ) from e
-
-    def _serialize_object(self, obj):
-        """Recursively serializes an object and all its nested components."""
-        # Handle Pydantic models
-        if hasattr(obj, "dict"):
-            return self._serialize_object(obj.dict())
-        elif hasattr(obj, "model_dump"):
-            return self._serialize_object(obj.model_dump(by_alias=True))
-        elif hasattr(obj, "to_dict"):
-            return self._serialize_object(obj.to_dict())
-        # Handle dictionaries
-        elif isinstance(obj, dict):
-            return {k: self._serialize_object(v) for k, v in obj.items()}
-        # Handle lists
-        elif isinstance(obj, list):
-            return [self._serialize_object(item) for item in obj]
-        # Handle other iterable objects (convert to dict first)
-        elif hasattr(obj, "__iter__") and not isinstance(obj, (str, bytes)):
-            try:
-                return self._serialize_object(dict(obj))
-            except (TypeError, ValueError):
-                return obj
-        # Return primitive types as is
-        else:
-            return obj
-
-    async def process(self) -> UiPathRuntimeResult:
-        """
-        Process the output and prepare the final execution result.
-
-        Returns:
-            UiPathRuntimeResult: The processed execution result.
-
-        Raises:
-            LangGraphRuntimeError: If processing fails.
-        """
-        try:
-            await self._save_resume_trigger()
-
-            return UiPathRuntimeResult(
-                output=self.serialized_output,
-                status=self.status,
-                resume=self._resume_trigger if self._resume_trigger else None,
-            )
-
-        except LangGraphRuntimeError:
-            raise
-        except Exception as e:
-            raise LangGraphRuntimeError(
-                "OUTPUT_PROCESSING_FAILED",
-                "Failed to process execution output",
-                f"Unexpected error during output processing: {str(e)}",
-                UiPathErrorCategory.SYSTEM,
-            ) from e
-
-    async def _save_resume_trigger(self) -> None:
-        """
-        Stores the resume trigger in the SQLite database if available.
-
-        Raises:
-            LangGraphRuntimeError: If database operations fail.
-        """
-        if not self._hitl_processor or not self.context.memory:
-            return
-
-        try:
-            await self.context.memory.setup()
-            async with (
-                self.context.memory.lock,
-                self.context.memory.conn.cursor() as cur,
-            ):
-                try:
-                    await cur.execute(f"""
-                        CREATE TABLE IF NOT EXISTS {self.context.resume_triggers_table} (
-                            id INTEGER PRIMARY KEY AUTOINCREMENT,
-                            type TEXT NOT NULL,
-                            key TEXT,
-                            folder_key TEXT,
-                            folder_path TEXT,
-                            payload TEXT,
-                            timestamp DATETIME DEFAULT (strftime('%Y-%m-%d %H:%M:%S', 'now', 'utc'))
-                        )
-                    """)
-                except Exception as e:
-                    raise LangGraphRuntimeError(
-                        "DB_TABLE_CREATION_FAILED",
-                        "Failed to create resume triggers table",
-                        f"Database error while creating table: {str(e)}",
-                        UiPathErrorCategory.SYSTEM,
-                    ) from e
-
-                try:
-                    self._resume_trigger = (
-                        await self._hitl_processor.create_resume_trigger()
-                    )
-                except Exception as e:
-                    raise LangGraphRuntimeError(
-                        "HITL_EVENT_CREATION_FAILED",
-                        "Failed to process HITL request",
-                        f"Error while trying to process HITL request: {str(e)}",
-                        UiPathErrorCategory.SYSTEM,
-                    ) from e
-                # if API trigger, override item_key and payload
-                if self._resume_trigger:
-                    if self._resume_trigger.api_resume:
-                        trigger_key = self._resume_trigger.api_resume.inbox_id
-                    else:
-                        trigger_key = self._resume_trigger.item_key
-                    try:
-                        logger.debug(
-                            f"ResumeTrigger: {self._resume_trigger.trigger_type} {self._resume_trigger.item_key}"
-                        )
-                        if isinstance(self._resume_trigger.payload, dict):
-                            payload = json.dumps(self._resume_trigger.payload)
-                        else:
-                            payload = str(self._resume_trigger.payload)
-                        await cur.execute(
-                            f"INSERT INTO {self.context.resume_triggers_table} (type, key, payload, folder_path, folder_key) VALUES (?, ?, ?, ?, ?)",
-                            (
-                                self._resume_trigger.trigger_type.value,
-                                trigger_key,
-                                payload,
-                                self._resume_trigger.folder_path,
-                                self._resume_trigger.folder_key,
-                            ),
-                        )
-                        await self.context.memory.conn.commit()
-                    except Exception as e:
-                        raise LangGraphRuntimeError(
-                            "DB_INSERT_FAILED",
-                            "Failed to save resume trigger",
-                            f"Database error while saving resume trigger: {str(e)}",
-                            UiPathErrorCategory.SYSTEM,
-                        ) from e
-        except LangGraphRuntimeError:
-            raise
-        except Exception as e:
-            raise LangGraphRuntimeError(
-                "RESUME_TRIGGER_SAVE_FAILED",
-                "Failed to save resume trigger",
-                f"Unexpected error while saving resume trigger: {str(e)}",
-                UiPathErrorCategory.SYSTEM,
-            ) from e
```
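The serialization helper is the piece most likely to matter to callers. A minimal sketch of its behavior, using a hypothetical Pydantic model for illustration; the processor is constructed without a real context only because `_serialize_object` does not touch it.

```python
# Sketch: _serialize_object recursively converts Pydantic models (via .dict()/
# .model_dump()/.to_dict()), dicts, lists, and other iterables into plain data.
from pydantic import BaseModel

from uipath_langchain._cli._runtime._output import LangGraphOutputProcessor  # removed path

class Report(BaseModel):  # hypothetical model, not part of the package
    title: str
    scores: list[int]

processor = LangGraphOutputProcessor(context=None)  # context is unused by _serialize_object
print(processor._serialize_object({"report": Report(title="Q3", scores=[1, 2, 3])}))
# roughly: {'report': {'title': 'Q3', 'scores': [1, 2, 3]}}
```

`process()` then wraps this serialized output in a `UiPathRuntimeResult` whose status is SUSPENDED when an `Interrupt` was captured at creation time and SUCCESSFUL otherwise.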