uipath-langchain 0.0.112__py3-none-any.whl → 0.1.24__py3-none-any.whl
This diff shows the changes between two publicly released versions of the package as they appear in a supported public registry. It is provided for informational purposes only.
- uipath_langchain/_cli/_templates/main.py.template +12 -13
- uipath_langchain/_cli/cli_init.py +127 -156
- uipath_langchain/_cli/cli_new.py +2 -6
- uipath_langchain/_resources/AGENTS.md +21 -0
- uipath_langchain/_resources/REQUIRED_STRUCTURE.md +92 -0
- uipath_langchain/{tracers → _tracing}/__init__.py +0 -2
- uipath_langchain/_tracing/_instrument_traceable.py +134 -0
- uipath_langchain/_utils/__init__.py +1 -2
- uipath_langchain/_utils/_request_mixin.py +351 -54
- uipath_langchain/_utils/_settings.py +2 -11
- uipath_langchain/agent/exceptions/__init__.py +6 -0
- uipath_langchain/agent/exceptions/exceptions.py +11 -0
- uipath_langchain/agent/guardrails/__init__.py +21 -0
- uipath_langchain/agent/guardrails/actions/__init__.py +11 -0
- uipath_langchain/agent/guardrails/actions/base_action.py +23 -0
- uipath_langchain/agent/guardrails/actions/block_action.py +41 -0
- uipath_langchain/agent/guardrails/actions/escalate_action.py +274 -0
- uipath_langchain/agent/guardrails/actions/log_action.py +57 -0
- uipath_langchain/agent/guardrails/guardrail_nodes.py +125 -0
- uipath_langchain/agent/guardrails/guardrails_factory.py +70 -0
- uipath_langchain/agent/guardrails/guardrails_subgraph.py +247 -0
- uipath_langchain/agent/guardrails/types.py +20 -0
- uipath_langchain/agent/react/__init__.py +14 -0
- uipath_langchain/agent/react/agent.py +113 -0
- uipath_langchain/agent/react/constants.py +2 -0
- uipath_langchain/agent/react/init_node.py +20 -0
- uipath_langchain/agent/react/llm_node.py +43 -0
- uipath_langchain/agent/react/router.py +97 -0
- uipath_langchain/agent/react/terminate_node.py +82 -0
- uipath_langchain/agent/react/tools/__init__.py +7 -0
- uipath_langchain/agent/react/tools/tools.py +50 -0
- uipath_langchain/agent/react/types.py +39 -0
- uipath_langchain/agent/react/utils.py +49 -0
- uipath_langchain/agent/tools/__init__.py +17 -0
- uipath_langchain/agent/tools/context_tool.py +53 -0
- uipath_langchain/agent/tools/escalation_tool.py +111 -0
- uipath_langchain/agent/tools/integration_tool.py +181 -0
- uipath_langchain/agent/tools/process_tool.py +49 -0
- uipath_langchain/agent/tools/static_args.py +138 -0
- uipath_langchain/agent/tools/structured_tool_with_output_type.py +14 -0
- uipath_langchain/agent/tools/tool_factory.py +45 -0
- uipath_langchain/agent/tools/tool_node.py +22 -0
- uipath_langchain/agent/tools/utils.py +11 -0
- uipath_langchain/chat/__init__.py +4 -0
- uipath_langchain/chat/bedrock.py +187 -0
- uipath_langchain/chat/gemini.py +330 -0
- uipath_langchain/chat/mapper.py +309 -0
- uipath_langchain/chat/models.py +261 -38
- uipath_langchain/chat/openai.py +132 -0
- uipath_langchain/chat/supported_models.py +42 -0
- uipath_langchain/embeddings/embeddings.py +136 -36
- uipath_langchain/middlewares.py +0 -2
- uipath_langchain/py.typed +0 -0
- uipath_langchain/retrievers/context_grounding_retriever.py +7 -9
- uipath_langchain/runtime/__init__.py +36 -0
- uipath_langchain/runtime/_serialize.py +46 -0
- uipath_langchain/runtime/config.py +61 -0
- uipath_langchain/runtime/errors.py +43 -0
- uipath_langchain/runtime/factory.py +315 -0
- uipath_langchain/runtime/graph.py +159 -0
- uipath_langchain/runtime/runtime.py +453 -0
- uipath_langchain/runtime/schema.py +349 -0
- uipath_langchain/runtime/storage.py +115 -0
- uipath_langchain/vectorstores/context_grounding_vectorstore.py +90 -110
- {uipath_langchain-0.0.112.dist-info → uipath_langchain-0.1.24.dist-info}/METADATA +42 -20
- uipath_langchain-0.1.24.dist-info/RECORD +76 -0
- {uipath_langchain-0.0.112.dist-info → uipath_langchain-0.1.24.dist-info}/WHEEL +1 -1
- uipath_langchain-0.1.24.dist-info/entry_points.txt +5 -0
- uipath_langchain/_cli/_runtime/_context.py +0 -21
- uipath_langchain/_cli/_runtime/_exception.py +0 -17
- uipath_langchain/_cli/_runtime/_input.py +0 -136
- uipath_langchain/_cli/_runtime/_output.py +0 -234
- uipath_langchain/_cli/_runtime/_runtime.py +0 -371
- uipath_langchain/_cli/_utils/_graph.py +0 -202
- uipath_langchain/_cli/cli_run.py +0 -80
- uipath_langchain/tracers/AsyncUiPathTracer.py +0 -274
- uipath_langchain/tracers/_events.py +0 -33
- uipath_langchain/tracers/_instrument_traceable.py +0 -416
- uipath_langchain/tracers/_utils.py +0 -52
- uipath_langchain-0.0.112.dist-info/RECORD +0 -36
- uipath_langchain-0.0.112.dist-info/entry_points.txt +0 -2
- {uipath_langchain-0.0.112.dist-info → uipath_langchain-0.1.24.dist-info}/licenses/LICENSE +0 -0
uipath_langchain/embeddings/embeddings.py
CHANGED

@@ -1,19 +1,24 @@
 import os
-from typing import
+from typing import Any

 import httpx
-from langchain_community.callbacks.manager import openai_callback_var
 from langchain_openai.embeddings import AzureOpenAIEmbeddings, OpenAIEmbeddings
 from pydantic import Field
+from uipath.utils import EndpointManager

 from uipath_langchain._utils._request_mixin import UiPathRequestMixin
-from uipath_langchain._utils._settings import UiPathEndpoints


 class UiPathAzureOpenAIEmbeddings(UiPathRequestMixin, AzureOpenAIEmbeddings):
-    """Custom Embeddings connector for LangChain integration with UiPath
+    """Custom Embeddings connector for LangChain integration with UiPath.

-
+    This class modifies the OpenAI client to:
+    - Use UiPath endpoints
+    - Log request/response durations
+    - Apply custom URL preparation and header building
+    """
+
+    model_name: str | None = Field(
         default_factory=lambda: os.getenv(
             "UIPATH_MODEL_NAME", "text-embedding-3-large"
         ),

@@ -36,6 +41,7 @@ class UiPathAzureOpenAIEmbeddings(UiPathRequestMixin, AzureOpenAIEmbeddings):
             ),
             **kwargs,
         )
+        # Monkey-patch the OpenAI client to use your custom methods
         self.client._client._prepare_url = self._prepare_url
         self.client._client._build_headers = self._build_headers
         self.async_client._client._prepare_url = self._prepare_url

@@ -43,67 +49,161 @@ class UiPathAzureOpenAIEmbeddings(UiPathRequestMixin, AzureOpenAIEmbeddings):

     @property
     def endpoint(self) -> str:
-
+        endpoint = EndpointManager.get_embeddings_endpoint()
+        return endpoint.format(
             model=self.model_name, api_version=self.openai_api_version
         )


 class UiPathOpenAIEmbeddings(UiPathRequestMixin, OpenAIEmbeddings):
-    """Custom Embeddings connector for LangChain integration with UiPath
+    """Custom Embeddings connector for LangChain integration with UiPath.
+
+    This implementation uses custom _call and _acall methods for full control
+    over the API request/response cycle.
+    """

-    model_name:
+    model_name: str | None = Field(
         default_factory=lambda: os.getenv(
             "UIPATH_MODEL_NAME", "text-embedding-3-large"
         ),
         alias="model",
     )

+    # Add instance variables for tracking if needed
+    def __init__(self, **kwargs):
+        super().__init__(**kwargs)
+        self._total_tokens = 0
+        self._total_requests = 0
+
     def embed_documents(
-        self, texts:
-    ) ->
-        """Embed a list of documents using
-
-
-
-
+        self, texts: list[str], chunk_size: int | None = None, **kwargs: Any
+    ) -> list[list[float]]:
+        """Embed a list of documents using UiPath endpoint.
+
+        Args:
+            texts: List of texts to embed
+            chunk_size: Number of texts to process in each batch
+            **kwargs: Additional arguments passed to the API
+
+        Returns:
+            List of embeddings for each text
+        """
+        chunk_size_ = chunk_size or self.chunk_size
+        embeddings: list[list[float]] = []
+
         for i in range(0, len(texts), chunk_size_):
             chunk = texts[i : i + chunk_size_]
-
+
+            # Build payload matching OpenAI API format
+            payload: dict[str, Any] = {
+                "input": chunk,
+                "model": self.model,
+            }
+
+            # Add optional parameters
+            if self.dimensions is not None:
+                payload["dimensions"] = self.dimensions
+
+            # Add model_kwargs and any additional kwargs
+            payload.update(self.model_kwargs)
+            payload.update(kwargs)
+
+            # Make the API call using custom _call method
             response = self._call(self.url, payload, self.auth_headers)
+
+            # Extract embeddings
             chunk_embeddings = [r["embedding"] for r in response["data"]]
-            total_tokens += response["usage"]["prompt_tokens"]
             embeddings.extend(chunk_embeddings)
-
-
-
-
+
+            # Track usage internally (optional)
+            if "usage" in response:
+                self._total_tokens += response["usage"].get("total_tokens", 0)
+                self._total_requests += 1
+
         return embeddings

     async def aembed_documents(
         self,
-        texts:
-        chunk_size:
-
-
-
-
-
-
+        texts: list[str],
+        chunk_size: int | None = None,
+        **kwargs: Any,
+    ) -> list[list[float]]:
+        """Async version of embed_documents.
+
+        Args:
+            texts: List of texts to embed
+            chunk_size: Number of texts to process in each batch
+            **kwargs: Additional arguments passed to the API
+
+        Returns:
+            List of embeddings for each text
+        """
+        chunk_size_ = chunk_size or self.chunk_size
+        embeddings: list[list[float]] = []
+
         for i in range(0, len(texts), chunk_size_):
             chunk = texts[i : i + chunk_size_]
-
+
+            # Build payload matching OpenAI API format
+            payload: dict[str, Any] = {
+                "input": chunk,
+                "model": self.model,
+            }
+
+            # Add optional parameters
+            if self.dimensions is not None:
+                payload["dimensions"] = self.dimensions
+
+            # Add model_kwargs and any additional kwargs
+            payload.update(self.model_kwargs)
+            payload.update(kwargs)
+
+            # Make the async API call using custom _acall method
             response = await self._acall(self.url, payload, self.auth_headers)
+
+            # Extract embeddings
             chunk_embeddings = [r["embedding"] for r in response["data"]]
-            total_tokens += response["usage"]["prompt_tokens"]
             embeddings.extend(chunk_embeddings)
-
-
-
-
+
+            # Track usage internally (optional)
+            if "usage" in response:
+                self._total_tokens += response["usage"].get("total_tokens", 0)
+                self._total_requests += 1
+
         return embeddings

     @property
     def endpoint(self) -> str:
-
+        """Get the UiPath endpoint for embeddings."""
+        endpoint = EndpointManager.get_embeddings_endpoint()
+        return endpoint.format(
             model=self.model_name, api_version=self.openai_api_version
         )
+
+    @property
+    def url(self) -> str:
+        """Get the full URL for API requests."""
+        return self.endpoint
+
+    @property
+    def auth_headers(self) -> dict[str, str]:
+        """Get authentication headers for API requests."""
+        headers = {}
+        if self.openai_api_key:
+            headers["Authorization"] = (
+                f"Bearer {self.openai_api_key.get_secret_value()}"
+            )
+        if self.default_headers:
+            headers.update(self.default_headers)
+        return headers
+
+    def get_usage_stats(self) -> dict[str, int]:
+        """Get token usage statistics.
+
+        Returns:
+            Dictionary with total_tokens and total_requests
+        """
+        return {
+            "total_tokens": self._total_tokens,
+            "total_requests": self._total_requests,
+        }
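For orientation, a minimal usage sketch of the reworked connector follows (not part of the diff; it assumes UiPath credentials and endpoint settings are already resolved from the environment by UiPathRequestMixin, and that the default model name applies):

# Illustrative sketch only; configuration values are assumptions, not package documentation.
from uipath_langchain.embeddings.embeddings import UiPathOpenAIEmbeddings

embedder = UiPathOpenAIEmbeddings()  # model falls back to UIPATH_MODEL_NAME or "text-embedding-3-large"
vectors = embedder.embed_documents(["clause one", "clause two"])
print(len(vectors), len(vectors[0]))  # number of texts embedded, embedding dimension
print(embedder.get_usage_stats())     # {"total_tokens": ..., "total_requests": ...}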
uipath_langchain/middlewares.py
CHANGED

@@ -2,11 +2,9 @@ from uipath._cli.middlewares import Middlewares

 from ._cli.cli_init import langgraph_init_middleware
 from ._cli.cli_new import langgraph_new_middleware
-from ._cli.cli_run import langgraph_run_middleware


 def register_middleware():
     """This function will be called by the entry point system when uipath_langchain is installed"""
     Middlewares.register("init", langgraph_init_middleware)
-    Middlewares.register("run", langgraph_run_middleware)
     Middlewares.register("new", langgraph_new_middleware)
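A small sketch of what the entry-point hook now amounts to (illustrative only; the entry point system normally calls this function, as its docstring says, so invoking it by hand is just to make the effect visible):

# Illustrative: user code does not normally call this directly.
from uipath_langchain.middlewares import register_middleware

register_middleware()
# In 0.1.x this registers only the "init" and "new" middlewares; the "run"
# middleware and cli_run were removed, with execution apparently moving to the
# new uipath_langchain.runtime package.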
uipath_langchain/py.typed
File without changes
uipath_langchain/retrievers/context_grounding_retriever.py
CHANGED

@@ -1,24 +1,22 @@
-from typing import List, Optional
-
 from langchain_core.callbacks import (
     AsyncCallbackManagerForRetrieverRun,
     CallbackManagerForRetrieverRun,
 )
 from langchain_core.documents import Document
 from langchain_core.retrievers import BaseRetriever
-from uipath import UiPath
+from uipath.platform import UiPath


 class ContextGroundingRetriever(BaseRetriever):
     index_name: str
-    folder_path:
-    folder_key:
-    uipath_sdk:
-    number_of_results:
+    folder_path: str | None = None
+    folder_key: str | None = None
+    uipath_sdk: UiPath | None = None
+    number_of_results: int | None = 10

     def _get_relevant_documents(
         self, query: str, *, run_manager: CallbackManagerForRetrieverRun
-    ) ->
+    ) -> list[Document]:
         """Sync implementations for retriever calls context_grounding API to search the requested index."""

         sdk = self.uipath_sdk if self.uipath_sdk is not None else UiPath()

@@ -43,7 +41,7 @@ class ContextGroundingRetriever(BaseRetriever):

     async def _aget_relevant_documents(
         self, query: str, *, run_manager: AsyncCallbackManagerForRetrieverRun
-    ) ->
+    ) -> list[Document]:
         """Async implementations for retriever calls context_grounding API to search the requested index."""

         sdk = self.uipath_sdk if self.uipath_sdk is not None else UiPath()
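A brief usage sketch for the updated retriever (assumptions: a Context Grounding index named "policies" exists, UiPath authentication is configured in the environment, and the index name and query are invented for the example):

# Sketch under the assumptions stated above.
from uipath_langchain.retrievers.context_grounding_retriever import ContextGroundingRetriever

retriever = ContextGroundingRetriever(index_name="policies", number_of_results=5)
docs = retriever.invoke("What is the travel reimbursement limit?")  # standard LangChain retriever call
for doc in docs:
    print(doc.page_content[:80])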
uipath_langchain/runtime/__init__.py
ADDED

@@ -0,0 +1,36 @@
+from uipath.runtime import (
+    UiPathRuntimeContext,
+    UiPathRuntimeFactoryProtocol,
+    UiPathRuntimeFactoryRegistry,
+)
+
+from uipath_langchain.runtime.factory import UiPathLangGraphRuntimeFactory
+from uipath_langchain.runtime.runtime import UiPathLangGraphRuntime
+from uipath_langchain.runtime.schema import (
+    get_entrypoints_schema,
+    get_graph_schema,
+)
+
+
+def register_runtime_factory() -> None:
+    """Register the LangGraph factory. Called automatically via entry point."""
+
+    def create_factory(
+        context: UiPathRuntimeContext | None = None,
+    ) -> UiPathRuntimeFactoryProtocol:
+        return UiPathLangGraphRuntimeFactory(
+            context=context if context else UiPathRuntimeContext(),
+        )
+
+    UiPathRuntimeFactoryRegistry.register("langgraph", create_factory, "langgraph.json")
+
+
+register_runtime_factory()
+
+__all__ = [
+    "register_runtime_factory",
+    "get_entrypoints_schema",
+    "get_graph_schema",
+    "UiPathLangGraphRuntimeFactory",
+    "UiPathLangGraphRuntime",
+]
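A rough sketch of how this registration might be consumed (assumption: importing the module is what the entry point effectively triggers, and a factory can also be built directly the same way create_factory does):

# Sketch under the assumption above; not taken from package documentation.
from uipath.runtime import UiPathRuntimeContext
from uipath_langchain.runtime import UiPathLangGraphRuntimeFactory

# Importing uipath_langchain.runtime has already run register_runtime_factory(),
# so the "langgraph" factory is registered for projects that ship a langgraph.json.
factory = UiPathLangGraphRuntimeFactory(context=UiPathRuntimeContext())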
uipath_langchain/runtime/_serialize.py
ADDED

@@ -0,0 +1,46 @@
+from enum import Enum
+from typing import Any
+
+
+def serialize_output(output: Any) -> Any:
+    """
+    Recursively serialize an output object.
+
+    Args:
+        output: The object to serialize
+
+    Returns:
+        Dict[str, Any]: Serialized output as dictionary
+    """
+    if output is None:
+        return {}
+
+    # Handle Pydantic models
+    if hasattr(output, "model_dump"):
+        return serialize_output(output.model_dump(by_alias=True))
+    elif hasattr(output, "dict"):
+        return serialize_output(output.dict())
+    elif hasattr(output, "to_dict"):
+        return serialize_output(output.to_dict())
+
+    # Handle dictionaries
+    elif isinstance(output, dict):
+        return {k: serialize_output(v) for k, v in output.items()}
+
+    # Handle lists
+    elif isinstance(output, list):
+        return [serialize_output(item) for item in output]
+
+    # Handle other iterables (convert to dict first)
+    elif hasattr(output, "__iter__") and not isinstance(output, (str, bytes)):
+        try:
+            return serialize_output(dict(output))
+        except (TypeError, ValueError):
+            return output
+
+    # Handle Enums
+    elif isinstance(output, Enum):
+        return output.value
+
+    # Return primitive types as is
+    return output
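A short illustration of the recursion (the Invoice model and Status enum are invented for the example):

from enum import Enum

from pydantic import BaseModel

from uipath_langchain.runtime._serialize import serialize_output


class Status(Enum):  # hypothetical enum for the example
    PAID = "paid"


class Invoice(BaseModel):  # hypothetical model for the example
    number: str
    status: Status


print(serialize_output([Invoice(number="INV-1", status=Status.PAID), None]))
# Expected: [{'number': 'INV-1', 'status': 'paid'}, {}]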
uipath_langchain/runtime/config.py
ADDED

@@ -0,0 +1,61 @@
+"""Simple loader for langgraph.json configuration."""
+
+import json
+import os
+
+
+class LangGraphConfig:
+    """Simple loader for langgraph.json configuration."""
+
+    def __init__(self, config_path: str = "langgraph.json"):
+        """
+        Initialize configuration loader.
+
+        Args:
+            config_path: Path to langgraph.json file
+        """
+        self.config_path = config_path
+        self._graphs: dict[str, str] | None = None
+
+    @property
+    def exists(self) -> bool:
+        """Check if langgraph.json exists."""
+        return os.path.exists(self.config_path)
+
+    @property
+    def graphs(self) -> dict[str, str]:
+        """
+        Get graph name -> path mapping from config.
+
+        Returns:
+            Dictionary mapping graph names to file paths (e.g., {"agent": "agent.py:graph"})
+        """
+        if self._graphs is None:
+            self._graphs = self._load_graphs()
+        return self._graphs
+
+    def _load_graphs(self) -> dict[str, str]:
+        """Load graph definitions from langgraph.json."""
+        if not self.exists:
+            raise FileNotFoundError(f"Config file not found: {self.config_path}")
+
+        try:
+            with open(self.config_path, "r") as f:
+                config = json.load(f)
+
+            if "graphs" not in config:
+                raise ValueError("Missing required 'graphs' field in langgraph.json")
+
+            graphs = config["graphs"]
+            if not isinstance(graphs, dict):
+                raise ValueError("'graphs' must be a dictionary")
+
+            return graphs
+
+        except json.JSONDecodeError as e:
+            raise ValueError(f"Invalid JSON in {self.config_path}: {e}") from e
+
+    @property
+    def entrypoints(self) -> list[str]:
+        """Get list of available graph entrypoints."""
+        return list(self.graphs.keys())
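A quick sketch of the file shape this loader expects and what it returns (the "agent" entry mirrors the example in the docstring above):

import json

from uipath_langchain.runtime.config import LangGraphConfig

# Write a minimal langgraph.json of the expected shape (example values).
with open("langgraph.json", "w") as f:
    json.dump({"graphs": {"agent": "agent.py:graph"}}, f)

config = LangGraphConfig()
print(config.exists)       # True
print(config.graphs)       # {'agent': 'agent.py:graph'}
print(config.entrypoints)  # ['agent']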
uipath_langchain/runtime/errors.py
ADDED

@@ -0,0 +1,43 @@
+from enum import Enum
+from typing import Union
+
+from uipath.runtime.errors import (
+    UiPathBaseRuntimeError,
+    UiPathErrorCategory,
+    UiPathErrorCode,
+)
+
+
+class LangGraphErrorCode(Enum):
+    CONFIG_MISSING = "CONFIG_MISSING"
+    CONFIG_INVALID = "CONFIG_INVALID"
+
+    GRAPH_NOT_FOUND = "GRAPH_NOT_FOUND"
+    GRAPH_IMPORT_ERROR = "GRAPH_IMPORT_ERROR"
+    GRAPH_TYPE_ERROR = "GRAPH_TYPE_ERROR"
+    GRAPH_VALUE_ERROR = "GRAPH_VALUE_ERROR"
+    GRAPH_LOAD_ERROR = "GRAPH_LOAD_ERROR"
+    GRAPH_INVALID_UPDATE = "GRAPH_INVALID_UPDATE"
+    GRAPH_EMPTY_INPUT = "GRAPH_EMPTY_INPUT"
+
+    DB_QUERY_FAILED = "DB_QUERY_FAILED"
+    DB_TABLE_CREATION_FAILED = "DB_TABLE_CREATION_FAILED"
+    HITL_EVENT_CREATION_FAILED = "HITL_EVENT_CREATION_FAILED"
+    DB_INSERT_FAILED = "DB_INSERT_FAILED"
+    LICENSE_NOT_AVAILABLE = "LICENSE_NOT_AVAILABLE"
+
+
+class LangGraphRuntimeError(UiPathBaseRuntimeError):
+    """Custom exception for LangGraph runtime errors with structured error information."""
+
+    def __init__(
+        self,
+        code: Union[LangGraphErrorCode, UiPathErrorCode],
+        title: str,
+        detail: str,
+        category: UiPathErrorCategory = UiPathErrorCategory.UNKNOWN,
+        status: int | None = None,
+    ):
+        super().__init__(
+            code.value, title, detail, category, status, prefix="LANGGRAPH"
+        )