uipath-langchain 0.0.146__py3-none-any.whl → 0.0.148__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of uipath-langchain might be problematic.

@@ -17,7 +17,11 @@ from tenacity import (
     stop_after_attempt,
     wait_exponential_jitter,
 )
-from uipath._cli._runtime._contracts import UiPathErrorCategory, UiPathRuntimeError
+from uipath._cli._runtime._contracts import (
+    UiPathErrorCategory,
+    UiPathErrorCode,
+    UiPathRuntimeError,
+)
 from uipath._utils._ssl_context import get_httpx_client_kwargs

 from uipath_langchain._cli._runtime._exception import (
@@ -59,7 +63,7 @@ def _get_access_token(data):
        return get_uipath_token_header(settings)
    except ValidationError:
        raise UiPathRuntimeError(
-           code="AUTHENTICATION_REQUIRED",
+           UiPathErrorCode.AUTHENTICATION_REQUIRED,
            title="Authorization required",
            detail="Authorization required. Please run uipath auth",
            category=UiPathErrorCategory.USER,
@@ -1,12 +1,13 @@
 """UiPath ReAct Agent implementation"""

 from .agent import create_agent
-from .state import AgentGraphNode, AgentGraphState
+from .types import AgentGraphConfig, AgentGraphNode, AgentGraphState
 from .utils import resolve_output_model

 __all__ = [
     "create_agent",
-    "AgentGraphState",
-    "AgentGraphNode",
     "resolve_output_model",
+    "AgentGraphNode",
+    "AgentGraphState",
+    "AgentGraphConfig",
 ]
@@ -18,11 +18,11 @@ from .llm_node import (
 from .router import (
     route_agent,
 )
-from .state import AgentGraphNode, AgentGraphState
 from .terminate_node import (
     create_terminate_node,
 )
 from .tools import create_flow_control_tools
+from .types import AgentGraphConfig, AgentGraphNode, AgentGraphState


 def create_agent(
@@ -32,13 +32,16 @@ def create_agent(
     *,
     state_schema: Type[AgentGraphState] = AgentGraphState,
     response_format: type[BaseModel] | None = None,
-    recursion_limit: int = 50,
+    config: AgentGraphConfig | None = None,
 ) -> StateGraph[AgentGraphState]:
     """Build agent graph with INIT -> AGENT <-> TOOLS loop, terminated by control flow tools.

     Control flow tools (end_execution, raise_error) are auto-injected alongside regular tools.
     """
-    os.environ["LANGCHAIN_RECURSION_LIMIT"] = str(recursion_limit)
+    if config is None:
+        config = AgentGraphConfig()
+
+    os.environ["LANGCHAIN_RECURSION_LIMIT"] = str(config.recursion_limit)

     agent_tools = list(tools)
     flow_control_tools: list[BaseTool] = create_flow_control_tools(response_format)
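A hedged sketch of how the builder would be called with the new configuration object; the leading model and tools arguments are placeholders, since they are not shown in this hunk:

    from uipath_langchain.agent.react import AgentGraphConfig, create_agent

    # recursion_limit defaults to 50 and is validated with ge=1, so values
    # below 1 raise a pydantic ValidationError when the config is created.
    config = AgentGraphConfig(recursion_limit=100)

    # model and tools stand in for the builder's leading arguments (hypothetical names).
    graph = create_agent(model, tools, config=config)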
@@ -4,7 +4,7 @@ from typing import Sequence

 from langchain_core.messages import HumanMessage, SystemMessage

-from .state import AgentGraphState
+from .types import AgentGraphState


 def create_init_node(
@@ -7,7 +7,7 @@ from langchain_core.messages import AIMessage, AnyMessage
 from langchain_core.tools import BaseTool

 from .constants import MAX_SUCCESSIVE_COMPLETIONS
-from .state import AgentGraphState
+from .types import AgentGraphState
 from .utils import count_successive_completions

@@ -7,7 +7,7 @@ from uipath.agent.react import END_EXECUTION_TOOL, RAISE_ERROR_TOOL

 from .constants import MAX_SUCCESSIVE_COMPLETIONS
 from .exceptions import AgentNodeRoutingException
-from .state import AgentGraphNode, AgentGraphState
+from .types import AgentGraphNode, AgentGraphState
 from .utils import count_successive_completions

 FLOW_CONTROL_TOOLS = [END_EXECUTION_TOOL.name, RAISE_ERROR_TOOL.name]
@@ -11,7 +11,7 @@ from .exceptions import (
     AgentNodeRoutingException,
     AgentTerminationException,
 )
-from .state import AgentGraphState
+from .types import AgentGraphState


 def create_terminate_node(
@@ -3,6 +3,7 @@ from __future__ import annotations
 from enum import StrEnum

 from langgraph.graph import MessagesState
+from pydantic import BaseModel, Field


 class AgentGraphState(MessagesState):
@@ -16,3 +17,9 @@ class AgentGraphNode(StrEnum):
     AGENT = "agent"
     TOOLS = "tools"
     TERMINATE = "terminate"
+
+
+class AgentGraphConfig(BaseModel):
+    recursion_limit: int = Field(
+        default=50, ge=1, description="Maximum recursion limit for the agent graph"
+    )
@@ -2,23 +2,17 @@
 Vector store implementation that connects to UiPath Context Grounding as a backend.

 This is a read-only vector store that uses the UiPath Context Grounding API to retrieve documents.
-
-You need to set the following environment variables (also see .env.example):
-### - UIPATH_URL="https://alpha.uipath.com/{ORG_ID}/{TENANT_ID}"
-### - UIPATH_ACCESS_TOKEN={BEARER_TOKEN_WITH_CONTEXT_GROUNDING_PERMISSIONS}
-### - UIPATH_FOLDER_PATH="" - this can be left empty
-### - UIPATH_FOLDER_KEY="" - this can be left empty
 """

 from collections.abc import Iterable
-from typing import Any, Optional, TypeVar
+from typing import Any, Self

 from langchain_core.documents import Document
 from langchain_core.embeddings import Embeddings
 from langchain_core.vectorstores import VectorStore
+from typing_extensions import override
 from uipath import UiPath
-
-VST = TypeVar("VST", bound="ContextGroundingVectorStore")
+from uipath.models.context_grounding import ContextGroundingQueryResponse


 class ContextGroundingVectorStore(VectorStore):
@@ -26,42 +20,32 @@ class ContextGroundingVectorStore(VectorStore):

     This class provides a straightforward implementation that connects to the
     UiPath Context Grounding API for semantic searching.
-
-    Example:
-        .. code-block:: python
-
-            from uipath_agents_gym.tools.ecs_vectorstore import ContextGroundingVectorStore
-
-            # Initialize the vector store with an index name
-            vectorstore = ContextGroundingVectorStore(index_name="ECCN")
-
-            # Perform similarity search
-            docs_with_scores = vectorstore.similarity_search_with_score(
-                "How do I process an invoice?", k=5
-            )
     """

     def __init__(
         self,
         index_name: str,
-        folder_path: Optional[str] = None,
-        uipath_sdk: Optional[UiPath] = None,
+        uipath_sdk: UiPath | None = None,
+        folder_path: str | None = None,
     ):
         """Initialize the ContextGroundingVectorStore.

         Args:
-            index_name: Name of the context grounding index to use
-            uipath_sdk: Optional SDK instance to use. If not provided, a new instance will be created.
+            index_name: Name of the context grounding index to use (schema name)
+            uipath_sdk: Optional UiPath SDK instance.
+            folder_path: Optional folder path for folder-scoped operations
         """
         self.index_name = index_name
         self.folder_path = folder_path
         self.sdk = uipath_sdk or UiPath()

+    # VectorStore implementation methods
+
+    @override
     def similarity_search_with_score(
         self, query: str, k: int = 4, **kwargs: Any
     ) -> list[tuple[Document, float]]:
         """Return documents most similar to the query along with the distances.
-        The distance is 1 - score, where score is the relevance score returned by the Context Grounding API.

         Args:
             query: The query string
@@ -70,52 +54,24 @@ class ContextGroundingVectorStore(VectorStore):
         Returns:
             list of tuples of (document, score)
         """
-        # Call the UiPath SDK to perform the search
-        results = self.sdk.context_grounding.search(
-            name=self.index_name,
-            query=query,
-            number_of_results=k,
-            folder_path=self.folder_path,
-        )
-
-        # Convert the results to Documents with scores
-        docs_with_scores = []
-        for result in results:
-            # Create metadata from result fields
-            metadata = {
-                "source": result.source,
-                "id": result.id,
-                "reference": result.reference,
-                "page_number": result.page_number,
-                "source_document_id": result.source_document_id,
-                "caption": result.caption,
-            }
-
-            # Add any operation metadata if available
-            if result.metadata:
-                metadata["operation_id"] = result.metadata.operation_id
-                metadata["strategy"] = result.metadata.strategy
-
-            # Create a Document with the content and metadata
-            doc = Document(
-                page_content=result.content,
-                metadata=metadata,
+        # Use the context grounding service to perform search
+        results: list[ContextGroundingQueryResponse] = (
+            self.sdk.context_grounding.search(
+                name=self.index_name,
+                query=query,
+                number_of_results=k,
+                folder_path=self.folder_path,
             )
+        )

-            score = 1.0 - float(result.score)
-
-            docs_with_scores.append((doc, score))
-
-        return docs_with_scores
+        return self._convert_results_to_documents(results)

+    @override
     def similarity_search_with_relevance_scores(
         self, query: str, k: int = 4, **kwargs: Any
     ) -> list[tuple[Document, float]]:
         """Return documents along with their relevance scores on a scale from 0 to 1.

-        This directly uses the scores provided by the Context Grounding API,
-        which are already normalized between 0 and 1.
-
         Args:
             query: The query string
             k: Number of documents to return (default=4)
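As the docstrings above describe, similarity_search_with_score returns distances (1 - relevance) while similarity_search_with_relevance_scores returns relevance scores on a 0-1 scale. A minimal, illustrative sketch, assuming UiPath credentials are configured in the environment; the index name and query are placeholder values taken from the docstring example:

    from uipath_langchain.vectorstores.context_grounding_vectorstore import (
        ContextGroundingVectorStore,
    )

    store = ContextGroundingVectorStore(index_name="ECCN")

    # Distance form: 1 - relevance, as documented above.
    with_distance = store.similarity_search_with_score("How do I process an invoice?", k=5)

    # Relevance form: scores normalized to the 0-1 range.
    with_relevance = store.similarity_search_with_relevance_scores(
        "How do I process an invoice?", k=5
    )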
@@ -128,6 +84,7 @@ class ContextGroundingVectorStore(VectorStore):
             for doc, score in self.similarity_search_with_score(query, k, **kwargs)
         ]

+    @override
     async def asimilarity_search_with_score(
         self, query: str, k: int = 4, **kwargs: Any
     ) -> list[tuple[Document, float]]:
@@ -140,52 +97,23 @@ class ContextGroundingVectorStore(VectorStore):
         Returns:
             list of tuples of (document, score)
         """
-        # Call the UiPath SDK to perform the search asynchronously
-        results = await self.sdk.context_grounding.search_async(
+        # Use the context grounding service to perform async search
+        results: list[
+            ContextGroundingQueryResponse
+        ] = await self.sdk.context_grounding.search_async(
             name=self.index_name,
             query=query,
             number_of_results=k,
             folder_path=self.folder_path,
         )

-        # Convert the results to Documents with scores
-        docs_with_scores = []
-        for result in results:
-            # Create metadata from result fields
-            metadata = {
-                "source": result.source,
-                "id": result.id,
-                "reference": result.reference,
-                "page_number": result.page_number,
-                "source_document_id": result.source_document_id,
-                "caption": result.caption,
-            }
-
-            # Add any operation metadata if available
-            if result.metadata:
-                metadata["operation_id"] = result.metadata.operation_id
-                metadata["strategy"] = result.metadata.strategy
-
-            # Create a Document with the content and metadata
-            doc = Document(
-                page_content=result.content,
-                metadata=metadata,
-            )
-
-            # Get the distance score as 1 - ecs_score
-            score = 1.0 - float(result.score)
-
-            docs_with_scores.append((doc, score))
-
-        return docs_with_scores
+        return self._convert_results_to_documents(results)

+    @override
     async def asimilarity_search_with_relevance_scores(
         self, query: str, k: int = 4, **kwargs: Any
     ) -> list[tuple[Document, float]]:
-        """Asynchronously return documents along with their relevance scores on a scale from 0 to 1.
-
-        This directly uses the scores provided by the Context Grounding API,
-        which are already normalized between 0 and 1.
+        """Asynchronously return documents along with their relevance scores.

         Args:
             query: The query string
@@ -201,6 +129,7 @@ class ContextGroundingVectorStore(VectorStore):
             )
         ]

+    @override
     def similarity_search(
         self, query: str, k: int = 4, **kwargs: Any
     ) -> list[Document]:
@@ -216,6 +145,7 @@ class ContextGroundingVectorStore(VectorStore):
         docs_and_scores = self.similarity_search_with_score(query, k, **kwargs)
         return [doc for doc, _ in docs_and_scores]

+    @override
     async def asimilarity_search(
         self, query: str, k: int = 4, **kwargs: Any
     ) -> list[Document]:
@@ -231,14 +161,64 @@ class ContextGroundingVectorStore(VectorStore):
         docs_and_scores = await self.asimilarity_search_with_score(query, k, **kwargs)
         return [doc for doc, _ in docs_and_scores]

+    def _convert_results_to_documents(
+        self, results: list[ContextGroundingQueryResponse]
+    ) -> list[tuple[Document, float]]:
+        """Convert API results to Document objects with scores.
+
+        Args:
+            results: List of ContextGroundingQueryResponse objects
+
+        Returns:
+            List of tuples containing (Document, score)
+        """
+        docs_with_scores = []
+
+        for result in results:
+            # Create metadata from result fields
+            metadata = {}
+
+            # Add string fields with proper defaults
+            if result.source:
+                metadata["source"] = str(result.source)
+            if result.reference:
+                metadata["reference"] = str(result.reference)
+            if result.page_number:
+                metadata["page_number"] = str(result.page_number)
+            if result.source_document_id:
+                metadata["source_document_id"] = str(result.source_document_id)
+            if result.caption:
+                metadata["caption"] = str(result.caption)
+
+            # Add any operation metadata if available
+            if result.metadata:
+                if result.metadata.operation_id:
+                    metadata["operation_id"] = str(result.metadata.operation_id)
+                if result.metadata.strategy:
+                    metadata["strategy"] = str(result.metadata.strategy)
+
+            # Create a Document with the content and metadata
+            doc = Document(
+                page_content=result.content or "",
+                metadata=metadata,
+            )
+
+            # Convert score to distance (1 - score)
+            score = 1.0 - float(result.score or 0.0)
+
+            docs_with_scores.append((doc, score))
+
+        return docs_with_scores
+
     @classmethod
+    @override
     def from_texts(
-        cls: type[VST],
+        cls,
         texts: list[str],
         embedding: Embeddings,
-        metadatas: Optional[list[dict[str, Any]]] = None,
+        metadatas: list[dict[str, Any]] | None = None,
         **kwargs: Any,
-    ) -> VST:
+    ) -> Self:
         """This method is required by the VectorStore abstract class, but is not supported
         by ContextGroundingVectorStore which is read-only.

@@ -246,15 +226,14 @@ class ContextGroundingVectorStore(VectorStore):
             NotImplementedError: This method is not supported by ContextGroundingVectorStore
         """
         raise NotImplementedError(
-            "ContextGroundingVectorStore is a read-only wrapper for UiPath Context Grounding. "
-            "Creating a vector store from texts is not supported."
+            "ContextGroundingVectorStore is a read-only wrapper for UiPath Context Grounding."
         )

-    # Other required methods with minimal implementation to satisfy the interface
+    @override
     def add_texts(
         self,
         texts: Iterable[str],
-        metadatas: Optional[list[dict[str, Any]]] = None,
+        metadatas: list[dict[str, Any]] | None = None,
         **kwargs: Any,
     ) -> list[str]:
         """Not implemented for ContextGroundingVectorStore as this is a read-only wrapper."""
@@ -262,7 +241,8 @@ class ContextGroundingVectorStore(VectorStore):
             "ContextGroundingVectorStore is a read-only wrapper for UiPath Context Grounding."
         )

-    def delete(self, ids: Optional[list[str]] = None, **kwargs: Any) -> Optional[bool]:
+    @override
+    def delete(self, ids: list[str] | None = None, **kwargs: Any) -> bool | None:
         """Not implemented for ContextGroundingVectorStore as this is a read-only wrapper."""
         raise NotImplementedError(
             "ContextGroundingVectorStore is a read-only wrapper for UiPath Context Grounding."
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: uipath-langchain
-Version: 0.0.146
+Version: 0.0.148
 Summary: UiPath Langchain
 Project-URL: Homepage, https://uipath.com
 Project-URL: Repository, https://github.com/UiPath/uipath-langchain-python
@@ -26,7 +26,7 @@ Requires-Dist: openai>=1.65.5
 Requires-Dist: openinference-instrumentation-langchain>=0.1.50
 Requires-Dist: pydantic-settings>=2.6.0
 Requires-Dist: python-dotenv>=1.0.1
-Requires-Dist: uipath<2.2.0,>=2.1.110
+Requires-Dist: uipath<2.2.0,>=2.1.123
 Provides-Extra: langchain
 Description-Content-Type: text/markdown

@@ -26,18 +26,18 @@ uipath_langchain/_tracing/__init__.py,sha256=C2dRvQ2ynxCmyICgE-rJHimWKEcFRME_o9g
 uipath_langchain/_tracing/_instrument_traceable.py,sha256=8f9FyAKWE6kH1N8ErbpwqZHAzNjGwbLjQn7jdX5yAgA,4343
 uipath_langchain/_tracing/_utils.py,sha256=r_fiSk3HDDAcePY_UbbEYiSbNqzn5gFeMPYBDvGrFx0,902
 uipath_langchain/_utils/__init__.py,sha256=-w-4TD9ZnJDCpj4VIPXhJciukrmDJJbmnOFnhAkAaEU,81
-uipath_langchain/_utils/_request_mixin.py,sha256=_drxHTRpfyVn3g3ppKgn466EBaUWH83qyeGKLY41CGY,20142
+uipath_langchain/_utils/_request_mixin.py,sha256=AqdXUuV8hHNJYUAmAoBA1sptK52H46-QcgutVPyUfao,20185
 uipath_langchain/_utils/_settings.py,sha256=2fExMQJ88YptfldmzMfZIpsx-m1gfMkeYGf5t6KIe0A,3084
 uipath_langchain/_utils/_sleep_policy.py,sha256=e9pHdjmcCj4CVoFM1jMyZFelH11YatsgWfpyrfXzKBQ,1251
-uipath_langchain/agent/react/__init__.py,sha256=rfVB6PQWUhPHff3J1BnPMBKBMaHfgEkII1gXwjiqUMY,272
-uipath_langchain/agent/react/agent.py,sha256=cU9ZiXi7EfV1pMf9nZBO8LhjYUmRsncyBk9k1pEBRh8,2455
+uipath_langchain/agent/react/__init__.py,sha256=XXplWNiD9XCxMVVyE0RL8tOwJjkP9ZKU6mFR5Em0ONk,314
+uipath_langchain/agent/react/agent.py,sha256=QxgeKa0kQkm7OEsVinjYMk410VHnDUdE9YQgk5Y3kug,2553
 uipath_langchain/agent/react/constants.py,sha256=B2yqryh242DETslaRYacUPbVdpjvvApjsBira_qhQwk,61
 uipath_langchain/agent/react/exceptions.py,sha256=b3lDhrIIHFljlLK3zXPznT7fYzfMRjSd8JfF4247tbI,226
-uipath_langchain/agent/react/init_node.py,sha256=zfPKgxi_mWsX7nBcK6wpqcDjHx8Q61TSnXFcTPLUd28,389
-uipath_langchain/agent/react/llm_node.py,sha256=jkbfzPNn6rNubgncPlPDQRNuk-sJbj08r95JfWxxWL8,1491
-uipath_langchain/agent/react/router.py,sha256=Ttq5O1_8t-z7pQ9tGhiaMmd_Da7_TWULLcgOED7gw_A,3626
-uipath_langchain/agent/react/state.py,sha256=EnkGXFlmMtJUy7BTZrbYlGBvvAZ70_HwKPW8n6uwjz0,330
-uipath_langchain/agent/react/terminate_node.py,sha256=Uuc-0z4qcPjHB_qZlaEaM2mK1ymCuJJludS7LopyCZg,1898
+uipath_langchain/agent/react/init_node.py,sha256=QhJQSqiM_qlQVsdoZDn_HxV4pl3-rO0Lk9pMlHrqZlw,389
+uipath_langchain/agent/react/llm_node.py,sha256=KzawaVkIWMztf9-LXLyUO7ZaXyuxVQbMusu8NpIHzhI,1491
+uipath_langchain/agent/react/router.py,sha256=PKP0-cJ_xP0Q42zN2MZpIHu4WSletSmQkFJe6QooJpY,3626
+uipath_langchain/agent/react/terminate_node.py,sha256=Gz_-R2LxvesocTZ6x6rIfV5FXiycmT1Fv5oXc2LpW5g,1898
+uipath_langchain/agent/react/types.py,sha256=oPUHckNdGT5n7FhAp5hZ7aNkJKX9Yfzvfvi_h2nKSZw,529
 uipath_langchain/agent/react/utils.py,sha256=0kZoEkGzddtTZSlGQcqbaPHH5MVtZegq0kBI5_vreGA,1060
 uipath_langchain/agent/react/tools/__init__.py,sha256=LGfG8Dc32ffKdXQyMI2oYzhNnTs1wbzsddXz6eU-0MY,102
 uipath_langchain/agent/react/tools/tools.py,sha256=vFBGnFrGocX__sotKisMJr2lxRRVqA0-uThzzhPADIw,1443
@@ -53,12 +53,10 @@ uipath_langchain/embeddings/__init__.py,sha256=QICtYB58ZyqFfDQrEaO8lTEgAU5NuEKlR
 uipath_langchain/embeddings/embeddings.py,sha256=45gKyb6HVKigwE-0CXeZcAk33c0mteaEdPGa8hviqcw,4339
 uipath_langchain/retrievers/__init__.py,sha256=rOn7PyyHgZ4pMnXWPkGqmuBmx8eGuo-Oyndo7Wm9IUU,108
 uipath_langchain/retrievers/context_grounding_retriever.py,sha256=YLCIwy89LhLnNqcM0YJ5mZoeNyCs5UiKD3Wly8gnW1E,2239
-uipath_langchain/tools/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-uipath_langchain/tools/preconfigured.py,sha256=SyvrLrM1kezZxVVytgScVO8nBfVYfFGobWjY7erzsYU,7490
 uipath_langchain/vectorstores/__init__.py,sha256=w8qs1P548ud1aIcVA_QhBgf_jZDrRMK5Lono78yA8cs,114
-uipath_langchain/vectorstores/context_grounding_vectorstore.py,sha256=TncIXG-YsUlO0R5ZYzWsM-Dj1SVCZbzmo2LraVxXelc,9559
-uipath_langchain-0.0.146.dist-info/METADATA,sha256=7u2FwmZLCfShij8MzNjH3-B3pVxqerwV99nGRAwVGSM,4276
-uipath_langchain-0.0.146.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
-uipath_langchain-0.0.146.dist-info/entry_points.txt,sha256=FUtzqGOEntlJKMJIXhQUfT7ZTbQmGhke1iCmDWZaQZI,81
-uipath_langchain-0.0.146.dist-info/licenses/LICENSE,sha256=JDpt-uotAkHFmxpwxi6gwx6HQ25e-lG4U_Gzcvgp7JY,1063
-uipath_langchain-0.0.146.dist-info/RECORD,,
+uipath_langchain/vectorstores/context_grounding_vectorstore.py,sha256=E0iuDBMAOl50Bdhl3YxywR0CngH1I99mkmfMh3byMFY,8396
+uipath_langchain-0.0.148.dist-info/METADATA,sha256=ucOXQNU19PUpVFPtiK3VrjAkGtx-jgXC8RkLtZspgoA,4276
+uipath_langchain-0.0.148.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+uipath_langchain-0.0.148.dist-info/entry_points.txt,sha256=FUtzqGOEntlJKMJIXhQUfT7ZTbQmGhke1iCmDWZaQZI,81
+uipath_langchain-0.0.148.dist-info/licenses/LICENSE,sha256=JDpt-uotAkHFmxpwxi6gwx6HQ25e-lG4U_Gzcvgp7JY,1063
+uipath_langchain-0.0.148.dist-info/RECORD,,
File without changes
@@ -1,220 +0,0 @@
-import json
-import logging
-from typing import Any, Iterable, Optional, Type
-
-import httpx
-from jsonschema_pydantic import jsonschema_to_pydantic as create_model  # type: ignore
-from langchain_core.caches import BaseCache
-from langchain_core.runnables.utils import Output
-from langchain_core.tools import BaseTool, StructuredTool
-from langgraph.types import interrupt
-from pydantic import BaseModel
-from uipath import UiPath
-from uipath.agent.models.agent import (
-    AgentEscalationChannel,
-    AgentEscalationResourceConfig,
-    AgentIntegrationToolParameter,
-    AgentIntegrationToolResourceConfig,
-    AgentProcessToolResourceConfig,
-    AgentResourceConfig,
-    LowCodeAgentDefinition,
-)
-from uipath.models import CreateAction, InvokeProcess
-from uipath.models.connections import ConnectionTokenType
-
-logger = logging.getLogger(__name__)
-
-
-def create_process_tool(resource: AgentProcessToolResourceConfig) -> Iterable[BaseTool]:
-    async def process(**kwargs) -> BaseModel:
-        return interrupt(
-            InvokeProcess(
-                name=resource.name,
-                input_arguments=kwargs,
-                process_folder_path=resource.properties.folder_path,
-            )
-        )
-
-    input_schema = create_model(resource.input_schema)
-
-    class ProcessTool(StructuredTool):
-        @property
-        def OutputType(self) -> type[Output]:
-            return create_model(resource.output_schema)
-
-    yield ProcessTool(
-        name=resource.name,
-        args_schema=input_schema,
-        description=resource.description,
-        coroutine=process,
-    )
-
-
-def create_escalation_tool_from_channel(channel: AgentEscalationChannel) -> BaseTool:
-    async def escalate(**kwargs) -> BaseModel:
-        recipients = channel.recipients
-        if len(recipients) > 1:
-            logger.warning(
-                "Received more than one recipient. Defaulting to first recipient."
-            )
-        assignee = recipients[0].value if recipients else None
-        return interrupt(
-            CreateAction(
-                title=channel.description,
-                data=kwargs,
-                assignee=assignee,
-                app_name=channel.properties.app_name,
-                app_folder_path=None,  # Channels specify folder name but not folder path.
-                app_folder_key=channel.properties.resource_key,
-                app_key=channel.properties.resource_key,
-                app_version=channel.properties.app_version,
-            )
-        )
-
-    input_schema = create_model(channel.input_schema)
-
-    class EscalationTool(StructuredTool):
-        @property
-        def OutputType(self) -> type[Output]:
-            return create_model(channel.output_schema)
-
-    return EscalationTool(
-        name=channel.name,
-        args_schema=input_schema,
-        description=channel.description,
-        coroutine=escalate,
-    )
-
-
-def create_escalation_tool(
-    resource: AgentEscalationResourceConfig,
-) -> Iterable[BaseTool]:
-    for channel in resource.channels:
-        yield create_escalation_tool_from_channel(channel)
-
-
-METHOD_MAP = {"GETBYID": "GET"}
-
-
-def build_query_params(parameters: list[AgentIntegrationToolParameter]):
-    query_params = [
-        x for x in parameters if x.field_location == "query" and x.value is not None
-    ]
-    if query_params:
-        return "?" + "&".join(f"{q.name}={q.value}" for q in query_params)
-    return ""
-
-
-def filter_query_params(
-    kwargs: dict[str, Any], parameters: list[AgentIntegrationToolParameter]
-):
-    query_params = {x.name for x in parameters if x.field_location == "query"}
-    non_query_params = {x.name for x in parameters if x.field_location != "query"}
-    fields_to_ignore = query_params - non_query_params
-    return {k: v for k, v in kwargs.items() if k not in fields_to_ignore}
-
-
-def create_integration_tool(
-    resource: AgentIntegrationToolResourceConfig,
-) -> Iterable[BaseTool]:
-    async def integration(**kwargs) -> BaseModel:
-        uipath = UiPath()
-        remote_connection = await uipath.connections.retrieve_async(
-            resource.properties.connection.id
-        )
-        token = await uipath.connections.retrieve_token_async(
-            resource.properties.connection.id, ConnectionTokenType.BEARER
-        )
-        tool_url = f"{remote_connection.api_base_uri}/v3/element/instances/{remote_connection.element_instance_id}{resource.properties.tool_path}"
-        tool_url = f"{tool_url}{build_query_params(resource.properties.parameters)}"
-        tool_url = tool_url.format(**kwargs)
-
-        authorization = f"{token.token_type} {token.access_token}"
-        method = METHOD_MAP.get(resource.properties.method, resource.properties.method)
-        response = await httpx.AsyncClient().request(
-            method,
-            tool_url,
-            headers={"Authorization": authorization},
-            content=json.dumps(
-                filter_query_params(kwargs, resource.properties.parameters)
-            ),
-        )
-        return response.json()
-
-    input_schema = create_model(resource.input_schema)
-
-    class IntegrationTool(StructuredTool):
-        @property
-        def OutputType(self) -> type[Output]:
-            return create_model({})
-
-    yield IntegrationTool(
-        name=resource.name,
-        args_schema=input_schema,
-        description=resource.description,
-        coroutine=integration,
-    )
-
-
-def create_cached_wrapper_from_tool(
-    wrapped: BaseTool, cache: Optional[BaseCache]
-) -> BaseTool:
-    if cache is None:
-        return wrapped
-    else:
-
-        async def cached_invocation(**kwargs) -> BaseModel:
-            namespace = f"{wrapped.name}.tool_invoke"
-            key = str(kwargs)
-            cached = cache.lookup(namespace, key)
-            if cached:
-                return cached[0]
-            response = await wrapped.ainvoke(input=kwargs)
-            cache.update(namespace, key, [response])
-            return response
-
-        input_schema = wrapped.args_schema
-
-        class CachedTool(StructuredTool):
-            OutputType: Type[BaseModel] = wrapped.OutputType
-
-        return CachedTool(
-            name=wrapped.name,
-            args_schema=input_schema,
-            description=wrapped.description,
-            coroutine=cached_invocation,
-        )
-
-
-def create_cached_wrapper(
-    tools: Iterable[BaseTool], cache: Optional[BaseCache]
-) -> Iterable[BaseTool]:
-    for wrapped in tools:
-        yield create_cached_wrapper_from_tool(wrapped, cache)
-
-
-def create_resource_tool(
-    resource: AgentResourceConfig, cache: Optional[BaseCache] = None
-) -> Iterable[BaseTool]:
-    match resource:
-        case AgentProcessToolResourceConfig():
-            return create_cached_wrapper(create_process_tool(resource), cache)
-        case AgentIntegrationToolResourceConfig():
-            return create_cached_wrapper(create_integration_tool(resource), cache)
-        case AgentEscalationResourceConfig():
-            return create_cached_wrapper(create_escalation_tool(resource), cache)
-        case _:
-            raise NotImplementedError()
-
-
-def safe_extract_tools(
-    agent_definition: LowCodeAgentDefinition, cache: Optional[BaseCache] = None
-) -> list[BaseTool]:
-    tools = []
-    for resource in agent_definition.resources:
-        try:
-            for structured_tool in create_resource_tool(resource, cache):
-                tools.append(structured_tool)
-        except NotImplementedError:
-            logger.warning(f"Unable to convert {resource.name} into a tool.")
-    return tools