uipath-langchain 0.1.24__py3-none-any.whl → 0.1.34__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- uipath_langchain/_utils/_request_mixin.py +8 -0
- uipath_langchain/_utils/_settings.py +3 -2
- uipath_langchain/agent/guardrails/__init__.py +0 -16
- uipath_langchain/agent/guardrails/actions/__init__.py +2 -0
- uipath_langchain/agent/guardrails/actions/base_action.py +1 -0
- uipath_langchain/agent/guardrails/actions/block_action.py +2 -1
- uipath_langchain/agent/guardrails/actions/escalate_action.py +243 -35
- uipath_langchain/agent/guardrails/actions/filter_action.py +55 -0
- uipath_langchain/agent/guardrails/actions/log_action.py +2 -1
- uipath_langchain/agent/guardrails/guardrail_nodes.py +186 -22
- uipath_langchain/agent/guardrails/guardrails_factory.py +200 -4
- uipath_langchain/agent/guardrails/types.py +0 -12
- uipath_langchain/agent/guardrails/utils.py +146 -0
- uipath_langchain/agent/react/agent.py +25 -8
- uipath_langchain/agent/react/constants.py +1 -2
- uipath_langchain/agent/{guardrails → react/guardrails}/guardrails_subgraph.py +94 -19
- uipath_langchain/agent/react/llm_node.py +41 -10
- uipath_langchain/agent/react/router.py +48 -37
- uipath_langchain/agent/react/types.py +15 -1
- uipath_langchain/agent/react/utils.py +1 -1
- uipath_langchain/agent/tools/__init__.py +2 -0
- uipath_langchain/agent/tools/mcp_tool.py +86 -0
- uipath_langchain/chat/__init__.py +4 -0
- uipath_langchain/chat/bedrock.py +16 -0
- uipath_langchain/chat/openai.py +57 -26
- uipath_langchain/chat/supported_models.py +9 -0
- uipath_langchain/chat/vertex.py +271 -0
- uipath_langchain/embeddings/embeddings.py +18 -12
- uipath_langchain/runtime/schema.py +116 -23
- {uipath_langchain-0.1.24.dist-info → uipath_langchain-0.1.34.dist-info}/METADATA +9 -6
- {uipath_langchain-0.1.24.dist-info → uipath_langchain-0.1.34.dist-info}/RECORD +34 -31
- uipath_langchain/chat/gemini.py +0 -330
- {uipath_langchain-0.1.24.dist-info → uipath_langchain-0.1.34.dist-info}/WHEEL +0 -0
- {uipath_langchain-0.1.24.dist-info → uipath_langchain-0.1.34.dist-info}/entry_points.txt +0 -0
- {uipath_langchain-0.1.24.dist-info → uipath_langchain-0.1.34.dist-info}/licenses/LICENSE +0 -0
uipath_langchain/runtime/schema.py

@@ -1,3 +1,4 @@
+from collections.abc import Iterable
 from dataclasses import dataclass
 from typing import Any, Callable, TypeVar
 
@@ -7,6 +8,8 @@ from langchain_core.runnables.base import Runnable
 from langchain_core.runnables.graph import Graph, Node
 from langgraph.graph.state import CompiledStateGraph
 from langgraph.prebuilt import ToolNode
+from langgraph.pregel._read import PregelNode
+from langgraph.pregel._write import ChannelWrite, ChannelWriteEntry
 from uipath.runtime.schema import (
     UiPathRuntimeEdge,
     UiPathRuntimeGraph,
@@ -29,28 +32,83 @@ class SchemaDetails:
 
 
 def _unwrap_runnable_callable(
-    runnable: Runnable[Any, Any],
+    runnable: Runnable[Any, Any],
+    target_type: type[T],
+    _seen: set[int] | None = None,
 ) -> T | None:
-    """
-
-
-
-
-
-
-
+    """Try to find an instance of target_type (e.g., BaseChatModel)
+    inside a Runnable.
+
+    Handles:
+    - Direct model runnables
+    - LangGraph RunnableCallable
+    - LangChain function runnables (RunnableLambda, etc.)
+    - RunnableBinding / RunnableSequence with nested steps
     """
     if isinstance(runnable, target_type):
         return runnable
 
+    if _seen is None:
+        _seen = set()
+    obj_id = id(runnable)
+    if obj_id in _seen:
+        return None
+    _seen.add(obj_id)
+
+    func: Callable[..., Any] | None = None
+
+    # 1) LangGraph internal RunnableCallable
     if RunnableCallable is not None and isinstance(runnable, RunnableCallable):
-        func
-
-
-
-
-
-
+        func = getattr(runnable, "func", None)
+
+    # 2) Generic LangChain function-wrapping runnables
+    if func is None:
+        for attr_name in ("func", "_func", "afunc", "_afunc"):
+            maybe = getattr(runnable, attr_name, None)
+            if callable(maybe):
+                func = maybe
+                break
+
+    # 3) Look into the function closure for a model
+    if func is not None:
+        closure = getattr(func, "__closure__", None) or ()
+        for cell in closure:
+            content = getattr(cell, "cell_contents", None)
+            if isinstance(content, target_type):
+                return content
+            if isinstance(content, Runnable):
+                found = _unwrap_runnable_callable(content, target_type, _seen)
+                if found is not None:
+                    return found
+
+    # 4) Deep-scan attributes, including nested runnables / containers
+    def _scan_value(value: Any) -> T | None:
+        if isinstance(value, target_type):
+            return value
+        if isinstance(value, Runnable):
+            return _unwrap_runnable_callable(value, target_type, _seen)
+        if isinstance(value, dict):
+            for v in value.values():
+                found = _scan_value(v)
+                if found is not None:
+                    return found
+        # Handle lists, tuples, sets, etc. but avoid strings/bytes
+        if isinstance(value, Iterable) and not isinstance(value, (str, bytes)):
+            for item in value:
+                found = _scan_value(item)
+                if found is not None:
+                    return found
+        return None
+
+    try:
+        attrs = vars(runnable)
+    except TypeError:
+        attrs = {}
+
+    for value in attrs.values():
+        found = _scan_value(value)
+        if found is not None:
+            return found
 
     return None
 
@@ -193,7 +251,6 @@ def get_graph_schema(
     nodes: list[UiPathRuntimeNode] = []
     for node_id, node in graph.nodes.items():
        subgraph: UiPathRuntimeGraph | None = subgraphs_dict.get(node_id)
-
        nodes.append(
            UiPathRuntimeNode(
                id=node.id,
@@ -204,15 +261,51 @@
             )
         )
 
+    # Use a set to track unique edges (source, target)
+    seen_edges: set[tuple[str, str]] = set()
     edges: list[UiPathRuntimeEdge] = []
+
+    # First, add edges from graph.edges (static edges)
     for edge in graph.edges:
-
-
-
-
-
+        edge_tuple = (edge.source, edge.target)
+        if edge_tuple not in seen_edges:
+            seen_edges.add(edge_tuple)
+            edges.append(
+                UiPathRuntimeEdge(
+                    source=edge.source,
+                    target=edge.target,
+                    label=getattr(edge, "data", None) or getattr(edge, "label", None),
+                )
             )
-
+
+    # Build a map of channel -> target node
+    channel_to_target: dict[str, str] = {}
+    node_spec: PregelNode
+    for node_name, node_spec in compiled_graph.nodes.items():
+        for trigger in node_spec.triggers:
+            if isinstance(trigger, str) and trigger.startswith("branch:to:"):
+                channel_to_target[trigger] = node_name
+
+    # Extract edges by looking at what each node writes to (dynamic edges from Command)
+    for source_name, node_spec in compiled_graph.nodes.items():
+        writer: Runnable[Any, Any]
+        for writer in node_spec.writers:
+            if isinstance(writer, ChannelWrite):
+                write_entry: Any
+                for write_entry in writer.writes:
+                    if isinstance(write_entry, ChannelWriteEntry) and isinstance(
+                        write_entry.channel, str
+                    ):
+                        target = channel_to_target.get(write_entry.channel)
+                        if target:
+                            edge_tuple = (source_name, target)
+                            if edge_tuple not in seen_edges:
+                                seen_edges.add(edge_tuple)
+                                edges.append(
+                                    UiPathRuntimeEdge(
+                                        source=source_name, target=target, label=None
+                                    )
+                                )
 
     return UiPathRuntimeGraph(nodes=nodes, edges=edges)
 
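The core of the schema.py change is the recursive model-unwrapping helper above. As a minimal, self-contained sketch (not code from this package; FakeModel and unwrap are hypothetical names), the closure-introspection idea it relies on can be reproduced with nothing more than langchain_core's RunnableLambda: a model captured inside a wrapped function is reachable through that function's __closure__ cells.

# Illustrative sketch only; mirrors the technique used by the new
# _unwrap_runnable_callable, not the package's actual implementation.
from langchain_core.runnables import RunnableLambda


class FakeModel:
    """Stand-in for a chat model bound inside a node function."""

    def invoke(self, value):
        return f"model saw: {value}"


def unwrap(runnable, target_type):
    # Direct hit: the runnable itself is an instance of the target type.
    if isinstance(runnable, target_type):
        return runnable
    # RunnableLambda keeps the wrapped callable on .func; scan its closure cells.
    func = getattr(runnable, "func", None)
    for cell in getattr(func, "__closure__", None) or ():
        content = getattr(cell, "cell_contents", None)
        if isinstance(content, target_type):
            return content
    return None


model = FakeModel()
node = RunnableLambda(lambda state: model.invoke(state))  # model captured in closure
assert unwrap(node, FakeModel) is model

The version in the diff additionally guards against cycles with a seen-set, checks several func-like attributes, and falls back to deep-scanning vars(runnable).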
{uipath_langchain-0.1.24.dist-info → uipath_langchain-0.1.34.dist-info}/METADATA

@@ -1,7 +1,7 @@
 Metadata-Version: 2.4
 Name: uipath-langchain
-Version: 0.1.24
-Summary: UiPath
+Version: 0.1.34
+Summary: Python SDK that enables developers to build and deploy LangGraph agents to the UiPath Cloud Platform
 Project-URL: Homepage, https://uipath.com
 Project-URL: Repository, https://github.com/UiPath/uipath-langchain-python
 Project-URL: Documentation, https://uipath.github.io/uipath-python/
@@ -13,29 +13,32 @@ Classifier: Programming Language :: Python :: 3.12
 Classifier: Programming Language :: Python :: 3.13
 Classifier: Topic :: Software Development :: Build Tools
 Requires-Python: >=3.11
+Requires-Dist: aiosqlite==0.21.0
 Requires-Dist: httpx>=0.27.0
 Requires-Dist: jsonpath-ng>=1.7.0
 Requires-Dist: jsonschema-pydantic-converter>=0.1.5
 Requires-Dist: langchain-core<2.0.0,>=1.0.0
+Requires-Dist: langchain-mcp-adapters==0.2.1
 Requires-Dist: langchain-openai<2.0.0,>=1.0.0
 Requires-Dist: langchain<2.0.0,>=1.0.0
 Requires-Dist: langgraph-checkpoint-sqlite<4.0.0,>=3.0.0
 Requires-Dist: langgraph<2.0.0,>=1.0.0
+Requires-Dist: mcp==1.24.0
 Requires-Dist: openinference-instrumentation-langchain>=0.1.56
 Requires-Dist: pydantic-settings>=2.6.0
 Requires-Dist: python-dotenv>=1.0.1
-Requires-Dist: uipath<2.3.0,>=2.2.
+Requires-Dist: uipath<2.3.0,>=2.2.35
 Provides-Extra: bedrock
 Requires-Dist: boto3-stubs>=1.41.4; extra == 'bedrock'
 Requires-Dist: langchain-aws>=0.2.35; extra == 'bedrock'
 Provides-Extra: vertex
-Requires-Dist:
-Requires-Dist: langchain-google-
-Requires-Dist: types-protobuf>=6.32.1.20251105; extra == 'vertex'
+Requires-Dist: google-generativeai>=0.8.0; extra == 'vertex'
+Requires-Dist: langchain-google-genai>=2.0.0; extra == 'vertex'
 Description-Content-Type: text/markdown
 
 # UiPath LangChain Python SDK
 
+[](https://pypi.org/project/uipath-langchain/)
 [](https://pypi.org/project/uipath-langchain/)
 [](https://pypi.org/project/uipath-langchain/)
 
{uipath_langchain-0.1.24.dist-info → uipath_langchain-0.1.34.dist-info}/RECORD

@@ -11,51 +11,54 @@ uipath_langchain/_resources/REQUIRED_STRUCTURE.md,sha256=BRmWWFtM0qNXj5uumALVxq9
 uipath_langchain/_tracing/__init__.py,sha256=C2dRvQ2ynxCmyICgE-rJHimWKEcFRME_o9gfX84Mb3Y,123
 uipath_langchain/_tracing/_instrument_traceable.py,sha256=cIxtDvlrPxVSZLOvpl571HxFO3wg7pNem4rLUuMI2Vg,4276
 uipath_langchain/_utils/__init__.py,sha256=-w-4TD9ZnJDCpj4VIPXhJciukrmDJJbmnOFnhAkAaEU,81
-uipath_langchain/_utils/_request_mixin.py,sha256=
-uipath_langchain/_utils/_settings.py,sha256=
+uipath_langchain/_utils/_request_mixin.py,sha256=VjOyH9fS8WhKEXufoAoSRhfaY1jumLacRwBsbaRsiy8,31307
+uipath_langchain/_utils/_settings.py,sha256=6E4yGzrEj3P3DcqCx_Y_lHQREQRfBm0yH1mSitiSJIU,3188
 uipath_langchain/_utils/_sleep_policy.py,sha256=e9pHdjmcCj4CVoFM1jMyZFelH11YatsgWfpyrfXzKBQ,1251
 uipath_langchain/agent/exceptions/__init__.py,sha256=GFh8mtsoKYghTHe93rRbIycWOW8dPt0okQjZuc2N-Hc,158
 uipath_langchain/agent/exceptions/exceptions.py,sha256=G7LdeQ9uMljLGTlixWBHvNJ1ULtUIDIkzApjfbsF6_0,216
-uipath_langchain/agent/guardrails/__init__.py,sha256=
-uipath_langchain/agent/guardrails/guardrail_nodes.py,sha256=
-uipath_langchain/agent/guardrails/guardrails_factory.py,sha256=
-uipath_langchain/agent/guardrails/
-uipath_langchain/agent/guardrails/
-uipath_langchain/agent/guardrails/actions/__init__.py,sha256=
-uipath_langchain/agent/guardrails/actions/base_action.py,sha256=
-uipath_langchain/agent/guardrails/actions/block_action.py,sha256=
-uipath_langchain/agent/guardrails/actions/escalate_action.py,sha256=
-uipath_langchain/agent/guardrails/actions/
+uipath_langchain/agent/guardrails/__init__.py,sha256=2lM-DFLp8oHR6qpkZy8_E93RL_tXKhtzbbsL00_m6TA,114
+uipath_langchain/agent/guardrails/guardrail_nodes.py,sha256=GNfgz45RY2Ic_xchB7KvP3asURxAK9ZBbs8Z7VE_Pac,9491
+uipath_langchain/agent/guardrails/guardrails_factory.py,sha256=4_YvYi13CdVr5qxpwkDpnXjlGe6L4XNmnbMeu9_BVUM,8998
+uipath_langchain/agent/guardrails/types.py,sha256=guVi19x7x5atcaXTUWK_c9O3bP6HfmnX_1Er1ErsnSE,169
+uipath_langchain/agent/guardrails/utils.py,sha256=dmlfDG_tBzbGhfOJrfhysaCAuVHL0GEEzkXqFQDen1M,4652
+uipath_langchain/agent/guardrails/actions/__init__.py,sha256=o_xoSaqcaOM33CRldvmnmux66BIK9fbcv994lB0m1-U,313
+uipath_langchain/agent/guardrails/actions/base_action.py,sha256=89POG1KQ0x1KPXKP8jT42M04SYoizp5E_co2-Ykq8gc,710
+uipath_langchain/agent/guardrails/actions/block_action.py,sha256=AMgpeHG41XbRV6bdFX78OLLJ-CTJ1lez6ai6s60zG4M,1374
+uipath_langchain/agent/guardrails/actions/escalate_action.py,sha256=JAGIBLB9FHMa104pTa9ddKi6wW6Ll22qjSZBMzqsYvk,18050
+uipath_langchain/agent/guardrails/actions/filter_action.py,sha256=PbHzz8_vZZ4X-kbHs9-ab3RFBXESlS5GOBEs8vSB-Sg,2185
+uipath_langchain/agent/guardrails/actions/log_action.py,sha256=1nknvqYFBmBFACWLhtreU9Uun40_H2ETsg9c6-IacGM,1968
 uipath_langchain/agent/react/__init__.py,sha256=BhRWUMZ9yLtBr7SyRwS-XMKG9o84DROyedCMpE6j1RU,362
-uipath_langchain/agent/react/agent.py,sha256=
-uipath_langchain/agent/react/constants.py,sha256=
+uipath_langchain/agent/react/agent.py,sha256=Foz9HagXh3lA-QDZSx6a302JAJnudL7YQ1njLShWSGA,4552
+uipath_langchain/agent/react/constants.py,sha256=yRi3sRxl2CSlshcn_ooWRN3TCcKdRMxJIwmiUQpAtFE,38
 uipath_langchain/agent/react/init_node.py,sha256=plhyTecBOBsm-YHfi73ZN3pJhe24tpCMr53IfRzN6SA,566
-uipath_langchain/agent/react/llm_node.py,sha256=
-uipath_langchain/agent/react/router.py,sha256
+uipath_langchain/agent/react/llm_node.py,sha256=Qy9A_s7L5IKfHXbhT8PChyUWYB-RRUjkW49C958wCRs,2484
+uipath_langchain/agent/react/router.py,sha256=-I3Ym0-FLQfg1J0v9Wibb0Xq7rTN73_FLsjVxa31aTQ,4080
 uipath_langchain/agent/react/terminate_node.py,sha256=G2k2xMgocfNC13RjF9A2Kea6KiPINmdnxiZHQCS_fAY,2734
-uipath_langchain/agent/react/types.py,sha256=
-uipath_langchain/agent/react/utils.py,sha256=
+uipath_langchain/agent/react/types.py,sha256=oZtWQiOwmghWUftrC0BzP5emNFq9uvWB2jUYFcWILPs,1417
+uipath_langchain/agent/react/utils.py,sha256=EhrbvnkrftR79LTKHBVBv9e7oBzs0xgGXhtk_bLF_fQ,1285
+uipath_langchain/agent/react/guardrails/guardrails_subgraph.py,sha256=NDtdngT6Z5amQzZMscX3rW2vupPSrY7hMlACukqQovk,11134
 uipath_langchain/agent/react/tools/__init__.py,sha256=LGfG8Dc32ffKdXQyMI2oYzhNnTs1wbzsddXz6eU-0MY,102
 uipath_langchain/agent/react/tools/tools.py,sha256=vFBGnFrGocX__sotKisMJr2lxRRVqA0-uThzzhPADIw,1443
-uipath_langchain/agent/tools/__init__.py,sha256=
+uipath_langchain/agent/tools/__init__.py,sha256=0ptYdkW3zIjdQljY7fsVKZKI-83lyprhCBoMJFlhgtg,527
 uipath_langchain/agent/tools/context_tool.py,sha256=oPvMVNxeb5q6xVlSScvMGcpT1wOUlFGOwsnFoP_dbuE,1772
 uipath_langchain/agent/tools/escalation_tool.py,sha256=VYNUEjfNMw3k2ESWMtn3RZ66WYRJcBvXpQaGzMXWCSw,3819
 uipath_langchain/agent/tools/integration_tool.py,sha256=EcCJ2g6NyyJj6sQeVyzLY1JL3sAtTAWu2dwVvWOkHgo,6555
+uipath_langchain/agent/tools/mcp_tool.py,sha256=NjSWoCYXixIheLWqYUCiYdnkI1umHIvTEBXJphg_WTM,3202
 uipath_langchain/agent/tools/process_tool.py,sha256=3RkqqeXzmm8mSNaesEQKST1SvB8UEl2H69Qyh0lBv1c,1696
 uipath_langchain/agent/tools/static_args.py,sha256=_bS-ENdVNxSQ74fi6H0bk7FG9QVZTqitM6es3eLsGq4,4521
 uipath_langchain/agent/tools/structured_tool_with_output_type.py,sha256=9EZB1WlVt7FFBf20jX3Av9vJf9MaDNCnKLUhMBIrtLQ,354
 uipath_langchain/agent/tools/tool_factory.py,sha256=o0bd_IMIIsVc0d6gODf3Lk-yiG6eXC9o9NYMgumY4GI,1419
 uipath_langchain/agent/tools/tool_node.py,sha256=TnXsjoShvhsoBuV5RoUVoJCc2zYPKSnJYSC9MGJoeOk,707
 uipath_langchain/agent/tools/utils.py,sha256=DsFeZ7kDzFaZ0bGHQN6TlGMJ90wYr7P1Vo1rpHPHWws,401
-uipath_langchain/chat/__init__.py,sha256=
-uipath_langchain/chat/bedrock.py,sha256
-uipath_langchain/chat/gemini.py,sha256=Zlz5KVoHdzlemzFTmFVjKCloaFo2lMiBbBq9MCJ9Lds,11903
+uipath_langchain/chat/__init__.py,sha256=YlbvzwZz2269kQK438YK8PDIpt5EoRukmpedazpSHHY,387
+uipath_langchain/chat/bedrock.py,sha256=-e1Spix8QizWJf7qLPTJ1-Bs6QLmAHF4VvZSb2kn_Uw,6662
 uipath_langchain/chat/mapper.py,sha256=XsWFbg6U4kW5Yj_ANvYUZ4HICybHcv9qWdfPPU8UTKA,11950
 uipath_langchain/chat/models.py,sha256=cbIRw-YhEvQcgt0DVTHc84lKIjeDhsQo5oxvBz9luD0,18168
-uipath_langchain/chat/openai.py,sha256=
-uipath_langchain/chat/supported_models.py,sha256=
+uipath_langchain/chat/openai.py,sha256=laLJbSoviyD1HdHRGL7zmGuDZngFZvQb93yNcSoR3r4,5829
+uipath_langchain/chat/supported_models.py,sha256=KttS2PZZzh_-fPub-bAenq2V0LfqCUPMmBtQnRP0Lik,1561
+uipath_langchain/chat/vertex.py,sha256=KX584HDzW8x1x-BjfTXpYnl9ofAAVNsphto29O7uoD0,10428
 uipath_langchain/embeddings/__init__.py,sha256=QICtYB58ZyqFfDQrEaO8lTEgAU5NuEKlR7iIrS0OBtc,156
-uipath_langchain/embeddings/embeddings.py,sha256=
+uipath_langchain/embeddings/embeddings.py,sha256=EeVWEpG-DhfewiHJAHNrUMtzIS2oF6rLUPNRtfRg0_8,7137
 uipath_langchain/retrievers/__init__.py,sha256=rOn7PyyHgZ4pMnXWPkGqmuBmx8eGuo-Oyndo7Wm9IUU,108
 uipath_langchain/retrievers/context_grounding_retriever.py,sha256=eVDt49dcyBVM8wgBn5FmaXK8gfgsOaNBC8tYnebiQeQ,2201
 uipath_langchain/runtime/__init__.py,sha256=h4pDyPy8kAYWvSohG3qFmx-bTVTIj1spbMqc7kO674E,1017
@@ -65,12 +68,12 @@ uipath_langchain/runtime/errors.py,sha256=iepiILaDtanusvyYBQSRsPDmKB-6GB94OXKATw
 uipath_langchain/runtime/factory.py,sha256=_fbYViNg6uuApUZiEJjYHQU2noH5dqFBTXWEPy-Oj_k,11043
 uipath_langchain/runtime/graph.py,sha256=1h_eaXkyN0G43BS0WUMoK_UNDao8jrmcyJqGVgwVEjM,5513
 uipath_langchain/runtime/runtime.py,sha256=cK7hewu6Mv8rQbUir03JgPUgv-zqVtYQ1YOfoMLB89I,16904
-uipath_langchain/runtime/schema.py,sha256=
+uipath_langchain/runtime/schema.py,sha256=BKtWYVVpK2FfdWO75iQEc-md7qh5z8DqsRPsbHJ_WBI,15130
 uipath_langchain/runtime/storage.py,sha256=Vem6kz0yGIUgIhTByzguvNOkb6N40b_ba8jcbjGo_eY,3954
 uipath_langchain/vectorstores/__init__.py,sha256=w8qs1P548ud1aIcVA_QhBgf_jZDrRMK5Lono78yA8cs,114
 uipath_langchain/vectorstores/context_grounding_vectorstore.py,sha256=SfRvIenOzn4r7jng_I9GIyknyeDx2hdW63O1rZY-ZLQ,8407
-uipath_langchain-0.1.
-uipath_langchain-0.1.
-uipath_langchain-0.1.
-uipath_langchain-0.1.
-uipath_langchain-0.1.
+uipath_langchain-0.1.34.dist-info/METADATA,sha256=Ntt7gH8D0fLKzbpKGWqW5iIu6O--cXe2J-eal5WPSDk,5890
+uipath_langchain-0.1.34.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
+uipath_langchain-0.1.34.dist-info/entry_points.txt,sha256=Wptt1FbvhcDzNQxDlu2Lt1ngBOdDIFPlM1p8WzXf6wQ,171
+uipath_langchain-0.1.34.dist-info/licenses/LICENSE,sha256=JDpt-uotAkHFmxpwxi6gwx6HQ25e-lG4U_Gzcvgp7JY,1063
+uipath_langchain-0.1.34.dist-info/RECORD,,
uipath_langchain/chat/gemini.py
DELETED

@@ -1,330 +0,0 @@
-import logging
-import os
-from typing import Optional, Union
-
-import aiohttp
-from pydantic import Field
-from uipath.utils import EndpointManager
-
-from .supported_models import GeminiModels
-
-logger = logging.getLogger(__name__)
-
-
-def _check_vertex_dependencies() -> None:
-    """Check if required dependencies for UiPathChatVertex are installed."""
-    import importlib.util
-
-    missing_packages = []
-
-    if importlib.util.find_spec("langchain_google_vertexai") is None:
-        missing_packages.append("langchain-google-vertexai")
-
-    if importlib.util.find_spec("langchain_community") is None:
-        missing_packages.append("langchain-community")
-
-    if missing_packages:
-        packages_str = ", ".join(missing_packages)
-        raise ImportError(
-            f"The following packages are required to use UiPathChatVertex: {packages_str}\n"
-            "Please install them using one of the following methods:\n\n"
-            " # Using pip:\n"
-            f" pip install uipath-langchain[vertex]\n\n"
-            " # Using uv:\n"
-            f" uv add 'uipath-langchain[vertex]'\n\n"
-        )
-
-
-_check_vertex_dependencies()
-
-from google.auth.credentials import AnonymousCredentials
-from google.cloud.aiplatform_v1.services.prediction_service import (
-    PredictionServiceAsyncClient as v1PredictionServiceAsyncClient,
-)
-from google.cloud.aiplatform_v1.services.prediction_service import (
-    PredictionServiceClient as v1PredictionServiceClient,
-)
-from google.cloud.aiplatform_v1beta1.services.prediction_service import (
-    PredictionServiceAsyncClient as v1beta1PredictionServiceAsyncClient,
-)
-from google.cloud.aiplatform_v1beta1.services.prediction_service import (
-    PredictionServiceClient as v1beta1PredictionServiceClient,
-)
-from google.cloud.aiplatform_v1beta1.services.prediction_service.transports.base import (
-    PredictionServiceTransport,
-)
-from google.cloud.aiplatform_v1beta1.services.prediction_service.transports.rest import (
-    PredictionServiceRestTransport,
-)
-from langchain_community.utilities.vertexai import (
-    get_client_info,
-)
-from langchain_google_vertexai import ChatVertexAI
-
-
-class CustomPredictionServiceRestTransport(PredictionServiceRestTransport):
-    def __init__(self, llmgw_url: str, custom_headers: dict[str, str], **kwargs):
-        self.llmgw_url = llmgw_url
-        self.custom_headers = custom_headers or {}
-
-        kwargs.setdefault("credentials", AnonymousCredentials())
-        super().__init__(**kwargs)
-
-        original_request = self._session.request
-
-        def redirected_request(method, url, **kwargs_inner):
-            headers = kwargs_inner.pop("headers", {})
-            headers.update(self.custom_headers)
-
-            is_streaming = kwargs_inner.get("stream", False)
-            headers["X-UiPath-Streaming-Enabled"] = "true" if is_streaming else "false"
-
-            return original_request(
-                method, self.llmgw_url, headers=headers, **kwargs_inner
-            )
-
-        self._session.request = redirected_request  # type: ignore[method-assign, assignment]
-
-
-class CustomPredictionServiceRestAsyncTransport:
-    """
-    Custom async transport for calling UiPath LLM Gateway.
-
-    Uses aiohttp for REST/HTTP communication instead of gRPC.
-    Handles both regular and streaming responses from the gateway.
-    """
-
-    def __init__(self, llmgw_url: str, custom_headers: dict[str, str], **kwargs):
-        self.llmgw_url = llmgw_url
-        self.custom_headers = custom_headers or {}
-
-    def _serialize_request(self, request) -> str:
-        """Convert proto-plus request to JSON string."""
-        import json
-
-        from proto import (  # type: ignore[import-untyped]
-            Message as ProtoMessage,
-        )
-
-        if isinstance(request, ProtoMessage):
-            request_dict = type(request).to_dict(
-                request, preserving_proto_field_name=False
-            )
-            return json.dumps(request_dict)
-        else:
-            from google.protobuf.json_format import MessageToJson
-
-            return MessageToJson(request, preserving_proto_field_name=False)
-
-    def _get_response_class(self, request):
-        """Get the response class corresponding to the request class."""
-        import importlib
-
-        response_class_name = request.__class__.__name__.replace("Request", "Response")
-        response_class = getattr(
-            request.__class__.__module__, response_class_name, None
-        )
-
-        if response_class is None:
-            module = importlib.import_module(request.__class__.__module__)
-            response_class = getattr(module, response_class_name, None)
-
-        return response_class
-
-    def _deserialize_response(self, response_json: str, request):
-        """Convert JSON string to proto-plus response object."""
-        import json
-
-        from proto import Message as ProtoMessage
-
-        response_class = self._get_response_class(request)
-
-        if response_class and isinstance(request, ProtoMessage):
-            return response_class.from_json(response_json, ignore_unknown_fields=True)
-        elif response_class:
-            from google.protobuf.json_format import Parse
-
-            return Parse(response_json, response_class(), ignore_unknown_fields=True)
-        else:
-            return json.loads(response_json)
-
-    async def _make_request(self, request_json: str, streaming: bool = False):
-        """Make HTTP POST request to UiPath gateway."""
-        headers = self.custom_headers.copy()
-        headers["Content-Type"] = "application/json"
-
-        if streaming:
-            headers["X-UiPath-Streaming-Enabled"] = "true"
-
-        connector = aiohttp.TCPConnector(ssl=True)
-        async with aiohttp.ClientSession(connector=connector) as session:
-            async with session.post(
-                self.llmgw_url, headers=headers, data=request_json
-            ) as response:
-                if response.status != 200:
-                    error_text = await response.text()
-                    raise Exception(f"HTTP {response.status}: {error_text}")
-
-                return await response.text()
-
-    async def generate_content(self, request, **kwargs):
-        """Handle non-streaming generate_content calls."""
-        request_json = self._serialize_request(request)
-        response_text = await self._make_request(request_json, streaming=False)
-        return self._deserialize_response(response_text, request)
-
-    def stream_generate_content(self, request, **kwargs):
-        """
-        Handle streaming generate_content calls.
-
-        Returns a coroutine that yields an async iterator.
-        """
-        return self._create_stream_awaitable(request)
-
-    async def _create_stream_awaitable(self, request):
-        """Awaitable wrapper that returns the async generator."""
-        return self._stream_implementation(request)
-
-    async def _stream_implementation(self, request):
-        """
-        Async generator that yields streaming response chunks.
-
-        Parses the array and yields each chunk individually.
-        """
-        import json
-
-        request_json = self._serialize_request(request)
-        response_text = await self._make_request(request_json, streaming=True)
-
-        try:
-            chunks_array = json.loads(response_text)
-            if isinstance(chunks_array, list):
-                logger.info(f"Streaming: yielding {len(chunks_array)} chunks")
-                for chunk_data in chunks_array:
-                    chunk_json = json.dumps(chunk_data)
-                    yield self._deserialize_response(chunk_json, request)
-                return
-        except Exception as e:
-            logger.info(f"Not a JSON array, trying single response: {e}")
-
-        try:
-            yield self._deserialize_response(response_text, request)
-        except Exception as e:
-            logger.error(f"Failed to parse streaming response: {e}")
-
-
-class UiPathChatVertex(ChatVertexAI):
-    transport: Optional[PredictionServiceTransport] = Field(default=None)
-    async_transport: Optional[CustomPredictionServiceRestAsyncTransport] = Field(
-        default=None
-    )
-    async_client: Optional[
-        Union[v1beta1PredictionServiceAsyncClient, v1PredictionServiceAsyncClient]
-    ] = Field(default=None)
-
-    def __init__(
-        self,
-        org_id: Optional[str] = None,
-        tenant_id: Optional[str] = None,
-        token: Optional[str] = None,
-        model_name: str = GeminiModels.gemini_2_5_flash,
-        **kwargs,
-    ):
-        org_id = org_id or os.getenv("UIPATH_ORGANIZATION_ID")
-        tenant_id = tenant_id or os.getenv("UIPATH_TENANT_ID")
-        token = token or os.getenv("UIPATH_ACCESS_TOKEN")
-
-        if not org_id:
-            raise ValueError(
-                "UIPATH_ORGANIZATION_ID environment variable or org_id parameter is required"
-            )
-        if not tenant_id:
-            raise ValueError(
-                "UIPATH_TENANT_ID environment variable or tenant_id parameter is required"
-            )
-        if not token:
-            raise ValueError(
-                "UIPATH_ACCESS_TOKEN environment variable or token parameter is required"
-            )
-
-        self._vendor = "vertexai"
-        self._model_name = model_name
-        self._url: Optional[str] = None
-
-        llmgw_url = self._build_base_url()
-
-        headers = self._build_headers(token)
-
-        super().__init__(
-            model=model_name,
-            project=os.getenv("VERTEXAI_PROJECT", "none"),
-            location=os.getenv("VERTEXAI_LOCATION", "us-central1"),
-            **kwargs,
-        )
-
-        self.transport = CustomPredictionServiceRestTransport(
-            llmgw_url=llmgw_url, custom_headers=headers
-        )
-
-        self.async_transport = CustomPredictionServiceRestAsyncTransport(
-            llmgw_url=llmgw_url, custom_headers=headers
-        )
-
-    @property
-    def prediction_client(
-        self,
-    ) -> Union[v1beta1PredictionServiceClient, v1PredictionServiceClient]:
-        if self.client is None:
-            if self.endpoint_version == "v1":
-                self.client = v1PredictionServiceClient(
-                    client_options=self.client_options,
-                    client_info=get_client_info(module=self._user_agent),
-                    transport=self.transport,  # type: ignore[arg-type]
-                )
-            else:
-                self.client = v1beta1PredictionServiceClient(
-                    client_options=self.client_options,
-                    client_info=get_client_info(module=self._user_agent),
-                    transport=self.transport,
-                )
-        return self.client
-
-    @property
-    def async_prediction_client(
-        self,
-    ) -> Union[
-        v1beta1PredictionServiceAsyncClient,
-        v1PredictionServiceAsyncClient,
-    ]:
-        return self.async_transport  # type: ignore[return-value]
-
-    @property
-    def endpoint(self) -> str:
-        vendor_endpoint = EndpointManager.get_vendor_endpoint()
-        formatted_endpoint = vendor_endpoint.format(
-            vendor=self._vendor,
-            model=self._model_name,
-        )
-        return formatted_endpoint
-
-    def _build_headers(self, token: str) -> dict[str, str]:
-        headers = {
-            # "X-UiPath-LlmGateway-ApiFlavor": "auto",
-            "Authorization": f"Bearer {token}",
-        }
-        if job_key := os.getenv("UIPATH_JOB_KEY"):
-            headers["X-UiPath-JobKey"] = job_key
-        if process_key := os.getenv("UIPATH_PROCESS_KEY"):
-            headers["X-UiPath-ProcessKey"] = process_key
-        return headers
-
-    def _build_base_url(self) -> str:
-        if not self._url:
-            env_uipath_url = os.getenv("UIPATH_URL")
-
-            if env_uipath_url:
-                self._url = f"{env_uipath_url.rstrip('/')}/{self.endpoint}"
-            else:
-                raise ValueError("UIPATH_URL environment variable is required")
-
-        return self._url