uipath-langchain 0.0.112__py3-none-any.whl → 0.1.24__py3-none-any.whl
This diff shows the changes between publicly available package versions that have been released to one of the supported registries, as they appear in those registries. It is provided for informational purposes only.
- uipath_langchain/_cli/_templates/main.py.template +12 -13
- uipath_langchain/_cli/cli_init.py +127 -156
- uipath_langchain/_cli/cli_new.py +2 -6
- uipath_langchain/_resources/AGENTS.md +21 -0
- uipath_langchain/_resources/REQUIRED_STRUCTURE.md +92 -0
- uipath_langchain/{tracers → _tracing}/__init__.py +0 -2
- uipath_langchain/_tracing/_instrument_traceable.py +134 -0
- uipath_langchain/_utils/__init__.py +1 -2
- uipath_langchain/_utils/_request_mixin.py +351 -54
- uipath_langchain/_utils/_settings.py +2 -11
- uipath_langchain/agent/exceptions/__init__.py +6 -0
- uipath_langchain/agent/exceptions/exceptions.py +11 -0
- uipath_langchain/agent/guardrails/__init__.py +21 -0
- uipath_langchain/agent/guardrails/actions/__init__.py +11 -0
- uipath_langchain/agent/guardrails/actions/base_action.py +23 -0
- uipath_langchain/agent/guardrails/actions/block_action.py +41 -0
- uipath_langchain/agent/guardrails/actions/escalate_action.py +274 -0
- uipath_langchain/agent/guardrails/actions/log_action.py +57 -0
- uipath_langchain/agent/guardrails/guardrail_nodes.py +125 -0
- uipath_langchain/agent/guardrails/guardrails_factory.py +70 -0
- uipath_langchain/agent/guardrails/guardrails_subgraph.py +247 -0
- uipath_langchain/agent/guardrails/types.py +20 -0
- uipath_langchain/agent/react/__init__.py +14 -0
- uipath_langchain/agent/react/agent.py +113 -0
- uipath_langchain/agent/react/constants.py +2 -0
- uipath_langchain/agent/react/init_node.py +20 -0
- uipath_langchain/agent/react/llm_node.py +43 -0
- uipath_langchain/agent/react/router.py +97 -0
- uipath_langchain/agent/react/terminate_node.py +82 -0
- uipath_langchain/agent/react/tools/__init__.py +7 -0
- uipath_langchain/agent/react/tools/tools.py +50 -0
- uipath_langchain/agent/react/types.py +39 -0
- uipath_langchain/agent/react/utils.py +49 -0
- uipath_langchain/agent/tools/__init__.py +17 -0
- uipath_langchain/agent/tools/context_tool.py +53 -0
- uipath_langchain/agent/tools/escalation_tool.py +111 -0
- uipath_langchain/agent/tools/integration_tool.py +181 -0
- uipath_langchain/agent/tools/process_tool.py +49 -0
- uipath_langchain/agent/tools/static_args.py +138 -0
- uipath_langchain/agent/tools/structured_tool_with_output_type.py +14 -0
- uipath_langchain/agent/tools/tool_factory.py +45 -0
- uipath_langchain/agent/tools/tool_node.py +22 -0
- uipath_langchain/agent/tools/utils.py +11 -0
- uipath_langchain/chat/__init__.py +4 -0
- uipath_langchain/chat/bedrock.py +187 -0
- uipath_langchain/chat/gemini.py +330 -0
- uipath_langchain/chat/mapper.py +309 -0
- uipath_langchain/chat/models.py +261 -38
- uipath_langchain/chat/openai.py +132 -0
- uipath_langchain/chat/supported_models.py +42 -0
- uipath_langchain/embeddings/embeddings.py +136 -36
- uipath_langchain/middlewares.py +0 -2
- uipath_langchain/py.typed +0 -0
- uipath_langchain/retrievers/context_grounding_retriever.py +7 -9
- uipath_langchain/runtime/__init__.py +36 -0
- uipath_langchain/runtime/_serialize.py +46 -0
- uipath_langchain/runtime/config.py +61 -0
- uipath_langchain/runtime/errors.py +43 -0
- uipath_langchain/runtime/factory.py +315 -0
- uipath_langchain/runtime/graph.py +159 -0
- uipath_langchain/runtime/runtime.py +453 -0
- uipath_langchain/runtime/schema.py +349 -0
- uipath_langchain/runtime/storage.py +115 -0
- uipath_langchain/vectorstores/context_grounding_vectorstore.py +90 -110
- {uipath_langchain-0.0.112.dist-info → uipath_langchain-0.1.24.dist-info}/METADATA +42 -20
- uipath_langchain-0.1.24.dist-info/RECORD +76 -0
- {uipath_langchain-0.0.112.dist-info → uipath_langchain-0.1.24.dist-info}/WHEEL +1 -1
- uipath_langchain-0.1.24.dist-info/entry_points.txt +5 -0
- uipath_langchain/_cli/_runtime/_context.py +0 -21
- uipath_langchain/_cli/_runtime/_exception.py +0 -17
- uipath_langchain/_cli/_runtime/_input.py +0 -136
- uipath_langchain/_cli/_runtime/_output.py +0 -234
- uipath_langchain/_cli/_runtime/_runtime.py +0 -371
- uipath_langchain/_cli/_utils/_graph.py +0 -202
- uipath_langchain/_cli/cli_run.py +0 -80
- uipath_langchain/tracers/AsyncUiPathTracer.py +0 -274
- uipath_langchain/tracers/_events.py +0 -33
- uipath_langchain/tracers/_instrument_traceable.py +0 -416
- uipath_langchain/tracers/_utils.py +0 -52
- uipath_langchain-0.0.112.dist-info/RECORD +0 -36
- uipath_langchain-0.0.112.dist-info/entry_points.txt +0 -2
- {uipath_langchain-0.0.112.dist-info → uipath_langchain-0.1.24.dist-info}/licenses/LICENSE +0 -0
uipath_langchain/runtime/schema.py (new file)
@@ -0,0 +1,349 @@
from dataclasses import dataclass
from typing import Any, Callable, TypeVar

from langchain_core.language_models.base import BaseLanguageModel
from langchain_core.language_models.chat_models import BaseChatModel
from langchain_core.runnables.base import Runnable
from langchain_core.runnables.graph import Graph, Node
from langgraph.graph.state import CompiledStateGraph
from langgraph.prebuilt import ToolNode
from uipath.runtime.schema import (
    UiPathRuntimeEdge,
    UiPathRuntimeGraph,
    UiPathRuntimeNode,
)

try:
    from langgraph._internal._runnable import RunnableCallable
except ImportError:
    RunnableCallable = None  # type: ignore

T = TypeVar("T")


@dataclass
class SchemaDetails:
    schema: dict[str, Any]
    has_input_circular_dependency: bool
    has_output_circular_dependency: bool


def _unwrap_runnable_callable(
    runnable: Runnable[Any, Any], target_type: type[T]
) -> T | None:
    """Unwrap a RunnableCallable to find an instance of the target type.

    Args:
        runnable: The runnable to unwrap
        target_type: The type to search for (e.g., BaseChatModel)

    Returns:
        Instance of target_type if found in the closure, None otherwise
    """
    if isinstance(runnable, target_type):
        return runnable

    if RunnableCallable is not None and isinstance(runnable, RunnableCallable):
        func: Callable[..., Any] | None = getattr(runnable, "func", None)
        if func is not None and hasattr(func, "__closure__") and func.__closure__:
            for cell in func.__closure__:
                if hasattr(cell, "cell_contents"):
                    content = cell.cell_contents
                    if isinstance(content, target_type):
                        return content

    return None


def _get_node_type(node: Node) -> str:
    """Determine the type of a LangGraph node using strongly-typed isinstance checks.

    Args:
        node: A Node object from the graph

    Returns:
        String representing the node type
    """
    if node.id in ("__start__", "__end__"):
        return node.id

    if node.data is None:
        return "node"

    if not isinstance(node.data, Runnable):
        return "node"

    tool_node = _unwrap_runnable_callable(node.data, ToolNode)
    if tool_node is not None:
        return "tool"

    chat_model = _unwrap_runnable_callable(node.data, BaseChatModel)  # type: ignore[type-abstract]
    if chat_model is not None:
        return "model"

    language_model = _unwrap_runnable_callable(node.data, BaseLanguageModel)  # type: ignore[type-abstract]
    if language_model is not None:
        return "model"

    return "node"


def _get_node_metadata(node: Node) -> dict[str, Any]:
    """Extract metadata from a node in a type-safe manner.

    Args:
        node: A Node object from the graph

    Returns:
        Dictionary containing node metadata
    """
    if node.data is None:
        return {}

    # Early return if data is not a Runnable
    if not isinstance(node.data, Runnable):
        return {}

    metadata: dict[str, Any] = {}

    tool_node = _unwrap_runnable_callable(node.data, ToolNode)
    if tool_node is not None:
        if hasattr(tool_node, "_tools_by_name"):
            tools_by_name = tool_node._tools_by_name
            metadata["tool_names"] = list(tools_by_name.keys())
            metadata["tool_count"] = len(tools_by_name)
        return metadata

    chat_model = _unwrap_runnable_callable(node.data, BaseChatModel)  # type: ignore[type-abstract]
    if chat_model is not None:
        if hasattr(chat_model, "model") and isinstance(chat_model.model, str):
            metadata["model_name"] = chat_model.model
        elif hasattr(chat_model, "model_name") and chat_model.model_name:
            metadata["model_name"] = chat_model.model_name

        if hasattr(chat_model, "temperature") and chat_model.temperature is not None:
            metadata["temperature"] = chat_model.temperature

        if hasattr(chat_model, "max_tokens") and chat_model.max_tokens is not None:
            metadata["max_tokens"] = chat_model.max_tokens
        elif (
            hasattr(chat_model, "max_completion_tokens")
            and chat_model.max_completion_tokens is not None
        ):
            metadata["max_tokens"] = chat_model.max_completion_tokens

    return metadata


def _convert_graph_to_uipath(graph: Graph) -> UiPathRuntimeGraph:
    """Helper to convert a LangGraph Graph object to UiPathRuntimeGraph.

    Args:
        graph: A LangGraph Graph object (from get_graph() call)

    Returns:
        UiPathRuntimeGraph with nodes and edges
    """
    nodes: list[UiPathRuntimeNode] = []
    for _, node in graph.nodes.items():
        nodes.append(
            UiPathRuntimeNode(
                id=node.id,
                name=node.name or node.id,
                type=_get_node_type(node),
                metadata=_get_node_metadata(node),
                subgraph=None,
            )
        )

    edges: list[UiPathRuntimeEdge] = []
    for edge in graph.edges:
        edges.append(
            UiPathRuntimeEdge(
                source=edge.source,
                target=edge.target,
                label=getattr(edge, "data", None) or getattr(edge, "label", None),
            )
        )

    return UiPathRuntimeGraph(nodes=nodes, edges=edges)


def get_graph_schema(
    compiled_graph: CompiledStateGraph[Any, Any, Any, Any], xray: int = 1
) -> UiPathRuntimeGraph:
    """Convert a compiled LangGraph to UiPathRuntimeGraph structure.

    Args:
        compiled_graph: A compiled LangGraph (Pregel instance)
        xray: Depth of subgraph expansion (0 = no subgraphs, 1 = one level, etc.)

    Returns:
        UiPathRuntimeGraph with hierarchical subgraph structure
    """
    graph: Graph = compiled_graph.get_graph(xray=0)  # Keep parent at xray=0

    subgraphs_dict: dict[str, UiPathRuntimeGraph] = {}
    if xray:
        for name, subgraph_pregel in compiled_graph.get_subgraphs():
            next_xray: int = xray - 1 if isinstance(xray, int) and xray > 0 else 0
            subgraph_graph: Graph = subgraph_pregel.get_graph(xray=next_xray)
            subgraphs_dict[name] = _convert_graph_to_uipath(subgraph_graph)

    nodes: list[UiPathRuntimeNode] = []
    for node_id, node in graph.nodes.items():
        subgraph: UiPathRuntimeGraph | None = subgraphs_dict.get(node_id)

        nodes.append(
            UiPathRuntimeNode(
                id=node.id,
                name=node.name or node.id,
                type=_get_node_type(node),
                metadata=_get_node_metadata(node),
                subgraph=subgraph,
            )
        )

    edges: list[UiPathRuntimeEdge] = []
    for edge in graph.edges:
        edges.append(
            UiPathRuntimeEdge(
                source=edge.source,
                target=edge.target,
                label=getattr(edge, "data", None) or getattr(edge, "label", None),
            )
        )

    return UiPathRuntimeGraph(nodes=nodes, edges=edges)


def get_entrypoints_schema(
    graph: CompiledStateGraph[Any, Any, Any],
) -> SchemaDetails:
    """Extract input/output schema from a LangGraph graph"""
    input_circular_dependency = False
    output_circular_dependency = False
    schema = {
        "input": {"type": "object", "properties": {}, "required": []},
        "output": {"type": "object", "properties": {}, "required": []},
    }

    if hasattr(graph, "input_schema"):
        if hasattr(graph.input_schema, "model_json_schema"):
            input_schema = graph.input_schema.model_json_schema()
            unpacked_ref_def_properties, input_circular_dependency = _resolve_refs(
                input_schema
            )

            # Process the schema to handle nullable types
            processed_properties = _process_nullable_types(
                unpacked_ref_def_properties.get("properties", {})
            )

            schema["input"]["properties"] = processed_properties
            schema["input"]["required"] = unpacked_ref_def_properties.get(
                "required", []
            )

    if hasattr(graph, "output_schema"):
        if hasattr(graph.output_schema, "model_json_schema"):
            output_schema = graph.output_schema.model_json_schema()
            unpacked_ref_def_properties, output_circular_dependency = _resolve_refs(
                output_schema
            )

            # Process the schema to handle nullable types
            processed_properties = _process_nullable_types(
                unpacked_ref_def_properties.get("properties", {})
            )

            schema["output"]["properties"] = processed_properties
            schema["output"]["required"] = unpacked_ref_def_properties.get(
                "required", []
            )

    return SchemaDetails(schema, input_circular_dependency, output_circular_dependency)


def _resolve_refs(schema, root=None, visited=None):
    """Recursively resolves $ref references in a JSON schema, handling circular references.

    Returns:
        tuple: (resolved_schema, has_circular_dependency)
    """
    if root is None:
        root = schema

    if visited is None:
        visited = set()

    has_circular = False

    if isinstance(schema, dict):
        if "$ref" in schema:
            ref_path = schema["$ref"]

            if ref_path in visited:
                # Circular dependency detected
                return {
                    "type": "object",
                    "description": f"Circular reference to {ref_path}",
                }, True

            visited.add(ref_path)

            # Resolve the reference
            ref_parts = ref_path.lstrip("#/").split("/")
            ref_schema = root
            for part in ref_parts:
                ref_schema = ref_schema.get(part, {})

            result, circular = _resolve_refs(ref_schema, root, visited)
            has_circular = has_circular or circular

            # Remove from visited after resolution (allows the same ref in different branches)
            visited.discard(ref_path)

            return result, has_circular

        resolved_dict = {}
        for k, v in schema.items():
            resolved_value, circular = _resolve_refs(v, root, visited)
            resolved_dict[k] = resolved_value
            has_circular = has_circular or circular
        return resolved_dict, has_circular

    elif isinstance(schema, list):
        resolved_list = []
        for item in schema:
            resolved_item, circular = _resolve_refs(item, root, visited)
            resolved_list.append(resolved_item)
            has_circular = has_circular or circular
        return resolved_list, has_circular

    return schema, False


def _process_nullable_types(
    schema: dict[str, Any] | list[Any] | Any,
) -> dict[str, Any] | list[Any]:
    """Process the schema to handle nullable types by removing anyOf with null and keeping the base type."""
    if isinstance(schema, dict):
        if "anyOf" in schema and len(schema["anyOf"]) == 2:
            types = [t.get("type") for t in schema["anyOf"]]
            if "null" in types:
                non_null_type = next(
                    t for t in schema["anyOf"] if t.get("type") != "null"
                )
                return non_null_type

        return {k: _process_nullable_types(v) for k, v in schema.items()}
    elif isinstance(schema, list):
        return [_process_nullable_types(item) for item in schema]
    return schema


__all__ = [
    "get_graph_schema",
    "get_entrypoints_schema",
]
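The schema helpers above are new in the 0.1.x line, so a short usage sketch may help. This is illustrative only: it assumes the hunk above is uipath_langchain/runtime/schema.py (the only entry in the file list with +349 lines), that the module is importable under that path, and that the UiPathRuntimeNode/UiPathRuntimeGraph fields set in the constructors are readable as attributes; the example graph itself is hypothetical.

# Illustrative sketch: run a tiny LangGraph graph through the new schema helpers.
from typing import TypedDict

from langgraph.graph import END, START, StateGraph

from uipath_langchain.runtime.schema import get_entrypoints_schema, get_graph_schema


class State(TypedDict):
    question: str
    answer: str


def answer_node(state: State) -> dict:
    # Placeholder node logic, just for the sketch.
    return {"answer": f"echo: {state['question']}"}


builder = StateGraph(State)
builder.add_node("answer", answer_node)
builder.add_edge(START, "answer")
builder.add_edge("answer", END)
compiled = builder.compile()

# Topology as UiPathRuntimeNode/UiPathRuntimeEdge objects; subgraphs are
# expanded one level deep by default (xray=1).
runtime_graph = get_graph_schema(compiled)
for node in runtime_graph.nodes:
    print(node.id, node.type, node.metadata)

# Input/output JSON-schema-like dicts with $refs resolved and
# "anyOf [T, null]" collapsed to the non-null type.
details = get_entrypoints_schema(compiled)
print(details.schema["input"]["properties"])
print(details.has_input_circular_dependency)

The xray argument controls how many subgraph levels get_graph_schema expands, and get_entrypoints_schema reports circular $ref chains through the SchemaDetails flags instead of recursing indefinitely.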
uipath_langchain/runtime/storage.py (new file)
@@ -0,0 +1,115 @@
"""SQLite implementation of UiPathResumableStorageProtocol."""

import json
from typing import cast

from langgraph.checkpoint.sqlite.aio import AsyncSqliteSaver
from pydantic import BaseModel
from uipath.runtime import (
    UiPathApiTrigger,
    UiPathResumeTrigger,
    UiPathResumeTriggerName,
    UiPathResumeTriggerType,
)


class SqliteResumableStorage:
    """SQLite storage for resume triggers."""

    def __init__(
        self, memory: AsyncSqliteSaver, table_name: str = "__uipath_resume_triggers"
    ):
        self.memory = memory
        self.table_name = table_name
        self._initialized = False

    async def _ensure_table(self) -> None:
        """Create table if needed."""
        if self._initialized:
            return

        await self.memory.setup()
        async with self.memory.lock, self.memory.conn.cursor() as cur:
            await cur.execute(f"""
                CREATE TABLE IF NOT EXISTS {self.table_name} (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    type TEXT NOT NULL,
                    name TEXT NOT NULL,
                    key TEXT,
                    folder_key TEXT,
                    folder_path TEXT,
                    payload TEXT,
                    timestamp DATETIME DEFAULT (strftime('%Y-%m-%d %H:%M:%S', 'now', 'utc'))
                )
            """)
            await self.memory.conn.commit()
        self._initialized = True

    async def save_trigger(self, trigger: UiPathResumeTrigger) -> None:
        """Save resume trigger to database."""
        await self._ensure_table()

        trigger_key = (
            trigger.api_resume.inbox_id if trigger.api_resume else trigger.item_key
        )
        payload = trigger.payload
        if payload:
            payload = (
                (
                    payload.model_dump()
                    if isinstance(payload, BaseModel)
                    else json.dumps(payload)
                )
                if isinstance(payload, dict)
                else str(payload)
            )

        async with self.memory.lock, self.memory.conn.cursor() as cur:
            await cur.execute(
                f"INSERT INTO {self.table_name} (type, key, name, payload, folder_path, folder_key) VALUES (?, ?, ?, ?, ?, ?)",
                (
                    trigger.trigger_type.value,
                    trigger_key,
                    trigger.trigger_name.value,
                    payload,
                    trigger.folder_path,
                    trigger.folder_key,
                ),
            )
            await self.memory.conn.commit()

    async def get_latest_trigger(self) -> UiPathResumeTrigger | None:
        """Get most recent trigger from database."""
        await self._ensure_table()

        async with self.memory.lock, self.memory.conn.cursor() as cur:
            await cur.execute(f"""
                SELECT type, key, name, folder_path, folder_key, payload
                FROM {self.table_name}
                ORDER BY timestamp DESC
                LIMIT 1
            """)
            result = await cur.fetchone()

        if not result:
            return None

        trigger_type, key, name, folder_path, folder_key, payload = cast(
            tuple[str, str, str, str, str, str], tuple(result)
        )

        resume_trigger = UiPathResumeTrigger(
            trigger_type=UiPathResumeTriggerType(trigger_type),
            trigger_name=UiPathResumeTriggerName(name),
            item_key=key,
            folder_path=folder_path,
            folder_key=folder_key,
            payload=payload,
        )

        if resume_trigger.trigger_type == UiPathResumeTriggerType.API:
            resume_trigger.api_resume = UiPathApiTrigger(
                inbox_id=resume_trigger.item_key, request=resume_trigger.payload
            )

        return resume_trigger