uipath-langchain-client 1.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- uipath_langchain_client/__init__.py +50 -0
- uipath_langchain_client/__version__.py +3 -0
- uipath_langchain_client/base_client.py +277 -0
- uipath_langchain_client/clients/anthropic/__init__.py +3 -0
- uipath_langchain_client/clients/anthropic/chat_models.py +157 -0
- uipath_langchain_client/clients/azure/__init__.py +4 -0
- uipath_langchain_client/clients/azure/chat_models.py +46 -0
- uipath_langchain_client/clients/azure/embeddings.py +46 -0
- uipath_langchain_client/clients/bedrock/__init__.py +7 -0
- uipath_langchain_client/clients/bedrock/chat_models.py +63 -0
- uipath_langchain_client/clients/bedrock/embeddings.py +33 -0
- uipath_langchain_client/clients/bedrock/utils.py +90 -0
- uipath_langchain_client/clients/google/__init__.py +4 -0
- uipath_langchain_client/clients/google/chat_models.py +203 -0
- uipath_langchain_client/clients/google/embeddings.py +45 -0
- uipath_langchain_client/clients/normalized/__init__.py +4 -0
- uipath_langchain_client/clients/normalized/chat_models.py +419 -0
- uipath_langchain_client/clients/normalized/embeddings.py +31 -0
- uipath_langchain_client/clients/openai/__init__.py +15 -0
- uipath_langchain_client/clients/openai/chat_models.py +102 -0
- uipath_langchain_client/clients/openai/embeddings.py +82 -0
- uipath_langchain_client/clients/vertexai/__init__.py +3 -0
- uipath_langchain_client/clients/vertexai/chat_models.py +48 -0
- uipath_langchain_client/factory.py +217 -0
- uipath_langchain_client/settings.py +32 -0
- uipath_langchain_client-1.0.0.dist-info/METADATA +276 -0
- uipath_langchain_client-1.0.0.dist-info/RECORD +28 -0
- uipath_langchain_client-1.0.0.dist-info/WHEEL +4 -0
|
@@ -0,0 +1,48 @@
|
|
|
1
|
+
from typing import Self
|
|
2
|
+
|
|
3
|
+
from pydantic import model_validator
|
|
4
|
+
from uipath_langchain_client.base_client import UiPathBaseLLMClient
|
|
5
|
+
from uipath_langchain_client.settings import UiPathAPIConfig
|
|
6
|
+
|
|
7
|
+
try:
|
|
8
|
+
from anthropic import AnthropicVertex, AsyncAnthropicVertex
|
|
9
|
+
from langchain_google_vertexai.model_garden import ChatAnthropicVertex
|
|
10
|
+
except ImportError as e:
|
|
11
|
+
raise ImportError(
|
|
12
|
+
"The 'vertexai' extra is required to use UiPathChatAnthropicVertex. "
|
|
13
|
+
"Install it with: uv add uipath-langchain-client[vertexai]"
|
|
14
|
+
) from e
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
class UiPathChatAnthropicVertex(UiPathBaseLLMClient, ChatAnthropicVertex):  # type: ignore[override]
    """ChatAnthropicVertex whose HTTP traffic is routed through UiPath's gateway.

    The LangChain/Anthropic SDK classes normally talk to Google VertexAI
    directly; here their internal clients are swapped for UiPath-backed HTTP
    clients after model validation, so all requests go through the UiPath
    passthrough completions API.
    """

    # Routing configuration for the UiPath gateway (passthrough VertexAI
    # completions with the Anthropic Claude API flavor).
    api_config: UiPathAPIConfig = UiPathAPIConfig(
        api_type="completions",
        client_type="passthrough",
        vendor_type="vertexai",
        api_flavor="anthropic-claude",
        freeze_base_url=True,
    )

    @model_validator(mode="after")
    def setup_uipath_client(self) -> Self:
        """Replace the SDK's default sync/async clients with UiPath-backed ones.

        Returns:
            Self, as required by pydantic ``mode="after"`` validators.
        """
        # GCP credentials/region are placeholders: routing and authentication
        # are injected entirely by the UiPath HTTP client (base_url + headers).
        # Timeout/retries are likewise handled by the UiPath client, so the
        # Anthropic SDK's own mechanisms are effectively disabled here.
        shared_kwargs = dict(
            region="PLACEHOLDER",
            project_id="PLACEHOLDER",
            access_token="PLACEHOLDER",
            timeout=None,  # handled by the UiPath client
            max_retries=1,  # handled by the UiPath client
        )
        self.client = AnthropicVertex(
            base_url=str(self.uipath_sync_client.base_url),
            default_headers=self.uipath_sync_client.headers,
            http_client=self.uipath_sync_client,
            **shared_kwargs,
        )
        self.async_client = AsyncAnthropicVertex(
            base_url=str(self.uipath_async_client.base_url),
            default_headers=self.uipath_async_client.headers,
            http_client=self.uipath_async_client,
            **shared_kwargs,
        )
        return self
|
@@ -0,0 +1,217 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Factory Module for UiPath LangChain Client
|
|
3
|
+
|
|
4
|
+
This module provides factory functions that automatically detect the appropriate
|
|
5
|
+
LangChain model class based on the model name and vendor. This simplifies usage
|
|
6
|
+
by eliminating the need to manually import provider-specific classes.
|
|
7
|
+
|
|
8
|
+
The factory queries UiPath's discovery endpoint to determine which vendor
|
|
9
|
+
(OpenAI, Google, Anthropic, etc.) provides a given model, then instantiates
|
|
10
|
+
the correct LangChain wrapper class.
|
|
11
|
+
|
|
12
|
+
Example:
|
|
13
|
+
>>> from uipath_langchain_client import get_chat_model, get_embedding_model
|
|
14
|
+
>>> from uipath_langchain_client.settings import get_default_client_settings
|
|
15
|
+
>>>
|
|
16
|
+
>>> settings = get_default_client_settings()
|
|
17
|
+
>>>
|
|
18
|
+
>>> # Auto-detect vendor from model name
|
|
19
|
+
>>> chat = get_chat_model("gpt-4o-2024-11-20", settings)
|
|
20
|
+
>>> embeddings = get_embedding_model("text-embedding-3-large", settings)
|
|
21
|
+
"""
|
|
22
|
+
|
|
23
|
+
from typing import Any, Literal
|
|
24
|
+
|
|
25
|
+
from langchain_core.embeddings import Embeddings
|
|
26
|
+
from langchain_core.language_models.chat_models import BaseChatModel
|
|
27
|
+
|
|
28
|
+
from uipath_langchain_client.settings import UiPathBaseSettings, get_default_client_settings
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
def _get_model_info(
|
|
32
|
+
model_name: str,
|
|
33
|
+
client_settings: UiPathBaseSettings,
|
|
34
|
+
byo_connection_id: str | None = None,
|
|
35
|
+
) -> dict[str, Any]:
|
|
36
|
+
available_models = client_settings.get_available_models()
|
|
37
|
+
|
|
38
|
+
matching_models = [m for m in available_models if m["modelName"].lower() == model_name.lower()]
|
|
39
|
+
|
|
40
|
+
if byo_connection_id:
|
|
41
|
+
matching_models = [
|
|
42
|
+
m
|
|
43
|
+
for m in matching_models
|
|
44
|
+
if (byom_details := m.get("byomDetails"))
|
|
45
|
+
and byom_details.get("integrationServiceConnectionId", "").lower()
|
|
46
|
+
== byo_connection_id.lower()
|
|
47
|
+
]
|
|
48
|
+
|
|
49
|
+
if not byo_connection_id and len(matching_models) > 1:
|
|
50
|
+
matching_models = [m for m in matching_models if m.get("byomDetails") is None]
|
|
51
|
+
|
|
52
|
+
if not matching_models:
|
|
53
|
+
raise ValueError(
|
|
54
|
+
f"Model {model_name} not found in available models the available models are: {[m['modelName'] for m in available_models]}"
|
|
55
|
+
)
|
|
56
|
+
|
|
57
|
+
return matching_models[0]
|
|
58
|
+
|
|
59
|
+
|
|
60
|
+
def get_chat_model(
|
|
61
|
+
model_name: str,
|
|
62
|
+
byo_connection_id: str | None = None,
|
|
63
|
+
client_settings: UiPathBaseSettings | None = None,
|
|
64
|
+
client_type: Literal["passthrough", "normalized"] = "passthrough",
|
|
65
|
+
**model_kwargs: Any,
|
|
66
|
+
) -> BaseChatModel:
|
|
67
|
+
"""Factory function to create the appropriate LangChain chat model for a given model name.
|
|
68
|
+
|
|
69
|
+
Automatically detects the model vendor and returns the correct LangChain model class.
|
|
70
|
+
|
|
71
|
+
Args:
|
|
72
|
+
model: Name of the model to use (e.g., "gpt-4", "claude-3-opus")
|
|
73
|
+
client_type: Use "normalized" for provider-agnostic API or "passthrough" for vendor-specific
|
|
74
|
+
**model_kwargs: Additional keyword arguments to pass to the model constructor
|
|
75
|
+
|
|
76
|
+
Returns:
|
|
77
|
+
A LangChain BaseChatModel instance configured for the specified model
|
|
78
|
+
|
|
79
|
+
Raises:
|
|
80
|
+
ValueError: If the model is not found in available models or vendor is not supported
|
|
81
|
+
"""
|
|
82
|
+
client_settings = client_settings or get_default_client_settings()
|
|
83
|
+
model_info = _get_model_info(model_name, client_settings, byo_connection_id)
|
|
84
|
+
|
|
85
|
+
if client_type == "normalized":
|
|
86
|
+
from uipath_langchain_client.clients.normalized.chat_models import (
|
|
87
|
+
UiPathNormalizedChatModel,
|
|
88
|
+
)
|
|
89
|
+
|
|
90
|
+
return UiPathNormalizedChatModel(model=model_name, **model_kwargs)
|
|
91
|
+
|
|
92
|
+
vendor_type = model_info["vendor"].lower()
|
|
93
|
+
match vendor_type:
|
|
94
|
+
case "openai":
|
|
95
|
+
if "gpt" in model_name:
|
|
96
|
+
from uipath_langchain_client.clients.openai.chat_models import (
|
|
97
|
+
UiPathAzureChatOpenAI,
|
|
98
|
+
)
|
|
99
|
+
|
|
100
|
+
return UiPathAzureChatOpenAI(
|
|
101
|
+
model=model_name,
|
|
102
|
+
client_settings=client_settings,
|
|
103
|
+
**model_kwargs,
|
|
104
|
+
)
|
|
105
|
+
else:
|
|
106
|
+
raise ValueError(f"Invalid model name: {model_name} for vendor: {vendor_type}")
|
|
107
|
+
case "vertexai":
|
|
108
|
+
if "gemini" in model_name:
|
|
109
|
+
from uipath_langchain_client.clients.google.chat_models import (
|
|
110
|
+
UiPathChatGoogleGenerativeAI,
|
|
111
|
+
)
|
|
112
|
+
|
|
113
|
+
return UiPathChatGoogleGenerativeAI(
|
|
114
|
+
model=model_name,
|
|
115
|
+
client_settings=client_settings,
|
|
116
|
+
**model_kwargs,
|
|
117
|
+
)
|
|
118
|
+
elif "claude" in model_name:
|
|
119
|
+
from uipath_langchain_client.clients.anthropic.chat_models import (
|
|
120
|
+
UiPathChatAnthropic,
|
|
121
|
+
)
|
|
122
|
+
|
|
123
|
+
return UiPathChatAnthropic(
|
|
124
|
+
model=model_name,
|
|
125
|
+
client_settings=client_settings,
|
|
126
|
+
vendor_type="vertexai",
|
|
127
|
+
**model_kwargs,
|
|
128
|
+
)
|
|
129
|
+
else:
|
|
130
|
+
raise ValueError(f"Invalid model name: {model_name} for vendor: {vendor_type}")
|
|
131
|
+
case "awsbedrock":
|
|
132
|
+
if "claude" in model_name:
|
|
133
|
+
from uipath_langchain_client.clients.anthropic.chat_models import (
|
|
134
|
+
UiPathChatAnthropic,
|
|
135
|
+
)
|
|
136
|
+
|
|
137
|
+
return UiPathChatAnthropic(
|
|
138
|
+
model=model_name,
|
|
139
|
+
client_settings=client_settings,
|
|
140
|
+
vendor_type="awsbedrock",
|
|
141
|
+
**model_kwargs,
|
|
142
|
+
)
|
|
143
|
+
else:
|
|
144
|
+
raise ValueError(f"Invalid model name: {model_name} for vendor: {vendor_type}")
|
|
145
|
+
case _:
|
|
146
|
+
raise ValueError(f"Invalid UiPath vendor type: {vendor_type}")
|
|
147
|
+
|
|
148
|
+
|
|
149
|
+
def get_embedding_model(
    model: str,
    byo_connection_id: str | None = None,
    client_settings: UiPathBaseSettings | None = None,
    client_type: Literal["passthrough", "normalized"] = "passthrough",
    **model_kwargs: Any,
) -> Embeddings:
    """Create the LangChain embeddings class matching the model's vendor.

    The vendor is resolved from UiPath's available-model listing, then the
    corresponding provider-specific wrapper is imported lazily (so optional
    extras are only required when actually used).

    Args:
        model: Name of the embeddings model (e.g., "text-embedding-3-large").
        byo_connection_id: Optional BYO integration-service connection id used
            to disambiguate bring-your-own-model deployments.
        client_settings: Client settings for authentication and routing;
            defaults to ``get_default_client_settings()``.
        client_type: "normalized" for the provider-agnostic API or
            "passthrough" for vendor-specific APIs.
        **model_kwargs: Extra arguments forwarded to the embeddings constructor.

    Returns:
        A LangChain Embeddings instance configured for the specified model.

    Raises:
        ValueError: If the model is not found or the vendor is not supported.

    Example:
        >>> settings = get_default_client_settings()
        >>> embeddings = get_embedding_model("text-embedding-3-large", settings)
        >>> vectors = embeddings.embed_documents(["Hello world"])
    """
    client_settings = client_settings or get_default_client_settings()
    info = _get_model_info(model, client_settings, byo_connection_id)

    if client_type == "normalized":
        from uipath_langchain_client.clients.normalized.embeddings import (
            UiPathNormalizedEmbeddings,
        )

        return UiPathNormalizedEmbeddings(
            model=model, client_settings=client_settings, **model_kwargs
        )

    vendor = info["vendor"].lower()
    # Lazy per-vendor import keeps provider extras optional.
    if vendor == "openai":
        from uipath_langchain_client.clients.openai.embeddings import (
            UiPathAzureOpenAIEmbeddings,
        )

        embeddings_cls = UiPathAzureOpenAIEmbeddings
    elif vendor == "vertexai":
        from uipath_langchain_client.clients.google.embeddings import (
            UiPathGoogleGenerativeAIEmbeddings,
        )

        embeddings_cls = UiPathGoogleGenerativeAIEmbeddings
    elif vendor == "awsbedrock":
        from uipath_langchain_client.clients.bedrock.embeddings import (
            UiPathBedrockEmbeddings,
        )

        embeddings_cls = UiPathBedrockEmbeddings
    else:
        raise ValueError(f"Invalid UiPath Embeddings provider: {vendor}")

    return embeddings_cls(model=model, client_settings=client_settings, **model_kwargs)
|
|
@@ -0,0 +1,32 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Settings re-exports for UiPath LangChain Client.
|
|
3
|
+
|
|
4
|
+
This module re-exports the settings classes from uipath_llm_client for convenience,
|
|
5
|
+
allowing users to configure authentication without importing from the base package.
|
|
6
|
+
|
|
7
|
+
Example:
|
|
8
|
+
>>> from uipath_langchain_client.settings import get_default_client_settings
|
|
9
|
+
>>>
|
|
10
|
+
>>> # Auto-detect backend from environment (defaults to AgentHub)
|
|
11
|
+
>>> settings = get_default_client_settings()
|
|
12
|
+
>>>
|
|
13
|
+
>>> # Or explicitly use LLMGateway
|
|
14
|
+
>>> from uipath_langchain_client.settings import LLMGatewaySettings
|
|
15
|
+
>>> settings = LLMGatewaySettings()
|
|
16
|
+
"""
|
|
17
|
+
|
|
18
|
+
from uipath_llm_client.settings import (
|
|
19
|
+
AgentHubSettings,
|
|
20
|
+
LLMGatewaySettings,
|
|
21
|
+
UiPathAPIConfig,
|
|
22
|
+
UiPathBaseSettings,
|
|
23
|
+
get_default_client_settings,
|
|
24
|
+
)
|
|
25
|
+
|
|
26
|
+
__all__ = [
|
|
27
|
+
"get_default_client_settings",
|
|
28
|
+
"LLMGatewaySettings",
|
|
29
|
+
"AgentHubSettings",
|
|
30
|
+
"UiPathAPIConfig",
|
|
31
|
+
"UiPathBaseSettings",
|
|
32
|
+
]
|
|
@@ -0,0 +1,276 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: uipath-langchain-client
|
|
3
|
+
Version: 1.0.0
|
|
4
|
+
Summary: LangChain-compatible chat models and embeddings for UiPath's LLM services
|
|
5
|
+
Requires-Python: >=3.11
|
|
6
|
+
Requires-Dist: langchain>=1.2.7
|
|
7
|
+
Requires-Dist: uipath-llm-client>=1.0.0
|
|
8
|
+
Provides-Extra: all
|
|
9
|
+
Requires-Dist: langchain-anthropic>=1.3.1; extra == 'all'
|
|
10
|
+
Requires-Dist: langchain-aws>=1.2.1; extra == 'all'
|
|
11
|
+
Requires-Dist: langchain-azure-ai>=1.0.0; extra == 'all'
|
|
12
|
+
Requires-Dist: langchain-google-genai>=4.2.0; extra == 'all'
|
|
13
|
+
Requires-Dist: langchain-google-vertexai>=3.2.1; extra == 'all'
|
|
14
|
+
Requires-Dist: langchain-openai>=1.1.7; extra == 'all'
|
|
15
|
+
Provides-Extra: anthropic
|
|
16
|
+
Requires-Dist: langchain-anthropic>=1.3.1; extra == 'anthropic'
|
|
17
|
+
Provides-Extra: aws
|
|
18
|
+
Requires-Dist: langchain-aws>=1.2.1; extra == 'aws'
|
|
19
|
+
Provides-Extra: azure
|
|
20
|
+
Requires-Dist: langchain-azure-ai>=1.0.0; extra == 'azure'
|
|
21
|
+
Provides-Extra: google
|
|
22
|
+
Requires-Dist: langchain-google-genai>=4.2.0; extra == 'google'
|
|
23
|
+
Provides-Extra: openai
|
|
24
|
+
Requires-Dist: langchain-openai>=1.1.7; extra == 'openai'
|
|
25
|
+
Provides-Extra: vertexai
|
|
26
|
+
Requires-Dist: langchain-google-vertexai>=3.2.1; extra == 'vertexai'
|
|
27
|
+
Description-Content-Type: text/markdown
|
|
28
|
+
|
|
29
|
+
# UiPath LangChain Client
|
|
30
|
+
|
|
31
|
+
LangChain-compatible chat models and embeddings for accessing LLMs through UiPath's infrastructure.
|
|
32
|
+
|
|
33
|
+
## Installation
|
|
34
|
+
|
|
35
|
+
```bash
|
|
36
|
+
# Base installation (normalized API only)
|
|
37
|
+
pip install uipath-langchain-client
|
|
38
|
+
|
|
39
|
+
# With specific provider extras for passthrough mode
|
|
40
|
+
pip install "uipath-langchain-client[openai]" # OpenAI/Azure models
|
|
41
|
+
pip install "uipath-langchain-client[google]" # Google Gemini models
|
|
42
|
+
pip install "uipath-langchain-client[anthropic]" # Anthropic Claude models
|
|
43
|
+
pip install "uipath-langchain-client[azure]" # Azure AI models
|
|
44
|
+
pip install "uipath-langchain-client[aws]" # AWS Bedrock models
|
|
45
|
+
pip install "uipath-langchain-client[vertexai]" # Google VertexAI models
|
|
46
|
+
pip install "uipath-langchain-client[all]" # All providers
|
|
47
|
+
```
|
|
48
|
+
|
|
49
|
+
## Quick Start
|
|
50
|
+
|
|
51
|
+
### Using Factory Functions (Recommended)
|
|
52
|
+
|
|
53
|
+
The factory functions automatically detect the model vendor and return the appropriate client:
|
|
54
|
+
|
|
55
|
+
```python
|
|
56
|
+
from uipath_langchain_client import get_chat_model, get_embedding_model
|
|
57
|
+
from uipath_langchain_client.settings import get_default_client_settings
|
|
58
|
+
|
|
59
|
+
# Get default settings (uses UIPATH_LLM_BACKEND env var or defaults to AgentHub)
|
|
60
|
+
settings = get_default_client_settings()
|
|
61
|
+
|
|
62
|
+
# Chat model - vendor auto-detected from model name
|
|
63
|
+
chat_model = get_chat_model(
|
|
64
|
+
model_name="gpt-4o-2024-11-20",
|
|
65
|
+
client_settings=settings,
|
|
66
|
+
)
|
|
67
|
+
response = chat_model.invoke("Hello, how are you?")
|
|
68
|
+
print(response.content)
|
|
69
|
+
|
|
70
|
+
# Embeddings model
|
|
71
|
+
embeddings = get_embedding_model(
|
|
72
|
+
model="text-embedding-3-large",
|
|
73
|
+
client_settings=settings,
|
|
74
|
+
)
|
|
75
|
+
vectors = embeddings.embed_documents(["Hello world"])
|
|
76
|
+
print(f"Embedding dimension: {len(vectors[0])}")
|
|
77
|
+
```
|
|
78
|
+
|
|
79
|
+
### Using Direct Client Classes
|
|
80
|
+
|
|
81
|
+
For more control, instantiate provider-specific classes directly:
|
|
82
|
+
|
|
83
|
+
```python
|
|
84
|
+
from uipath_langchain_client.clients.openai.chat_models import UiPathAzureChatOpenAI
|
|
85
|
+
from uipath_langchain_client.clients.google.chat_models import UiPathChatGoogleGenerativeAI
|
|
86
|
+
from uipath_langchain_client.clients.anthropic.chat_models import UiPathChatAnthropic
|
|
87
|
+
from uipath_langchain_client.clients.normalized.chat_models import UiPathNormalizedChatModel
|
|
88
|
+
from uipath_langchain_client.settings import get_default_client_settings
|
|
89
|
+
|
|
90
|
+
settings = get_default_client_settings()
|
|
91
|
+
|
|
92
|
+
# OpenAI/Azure
|
|
93
|
+
openai_chat = UiPathAzureChatOpenAI(model="gpt-4o-2024-11-20", client_settings=settings)
|
|
94
|
+
|
|
95
|
+
# Google Gemini
|
|
96
|
+
gemini_chat = UiPathChatGoogleGenerativeAI(model="gemini-2.5-flash", client_settings=settings)
|
|
97
|
+
|
|
98
|
+
# Anthropic Claude (via AWS Bedrock)
|
|
99
|
+
claude_chat = UiPathChatAnthropic(
|
|
100
|
+
model="anthropic.claude-sonnet-4-5-20250929-v1:0",
|
|
101
|
+
client_settings=settings,
|
|
102
|
+
vendor_type="awsbedrock",
|
|
103
|
+
)
|
|
104
|
+
|
|
105
|
+
# Normalized (provider-agnostic)
|
|
106
|
+
normalized_chat = UiPathNormalizedChatModel(model="gpt-4o-2024-11-20", client_settings=settings)
|
|
107
|
+
```
|
|
108
|
+
|
|
109
|
+
## Available Client Types
|
|
110
|
+
|
|
111
|
+
### Passthrough Mode (Default)
|
|
112
|
+
|
|
113
|
+
Uses vendor-specific APIs through UiPath's gateway. Full feature parity with native SDKs.
|
|
114
|
+
|
|
115
|
+
| Class | Provider | Models |
|
|
116
|
+
|-------|----------|--------|
|
|
117
|
+
| `UiPathAzureChatOpenAI` | OpenAI/Azure | GPT-4o, GPT-4, GPT-3.5 |
|
|
118
|
+
| `UiPathChatOpenAI` | OpenAI | GPT-4o, GPT-4, GPT-3.5 |
|
|
119
|
+
| `UiPathChatGoogleGenerativeAI` | Google | Gemini 2.5, 2.0, 1.5 |
|
|
120
|
+
| `UiPathChatAnthropic` | Anthropic | Claude Sonnet 4.5, Opus, etc. |
|
|
121
|
+
| `UiPathChatAnthropicVertex` | Anthropic (via VertexAI) | Claude models |
|
|
122
|
+
| `UiPathAzureAIChatCompletionsModel` | Azure AI | Various |
|
|
123
|
+
|
|
124
|
+
### Normalized Mode
|
|
125
|
+
|
|
126
|
+
Uses UiPath's normalized API for a consistent interface across all providers.
|
|
127
|
+
|
|
128
|
+
| Class | Description |
|
|
129
|
+
|-------|-------------|
|
|
130
|
+
| `UiPathNormalizedChatModel` | Provider-agnostic chat completions |
|
|
131
|
+
| `UiPathNormalizedEmbeddings` | Provider-agnostic embeddings |
|
|
132
|
+
|
|
133
|
+
## Features
|
|
134
|
+
|
|
135
|
+
### Streaming
|
|
136
|
+
|
|
137
|
+
```python
|
|
138
|
+
from uipath_langchain_client import get_chat_model
|
|
139
|
+
from uipath_langchain_client.settings import get_default_client_settings
|
|
140
|
+
|
|
141
|
+
settings = get_default_client_settings()
|
|
142
|
+
chat_model = get_chat_model(model_name="gpt-4o-2024-11-20", client_settings=settings)
|
|
143
|
+
|
|
144
|
+
# Sync streaming
|
|
145
|
+
for chunk in chat_model.stream("Write a haiku about Python"):
|
|
146
|
+
print(chunk.content, end="", flush=True)
|
|
147
|
+
|
|
148
|
+
# Async streaming
|
|
149
|
+
async for chunk in chat_model.astream("Write a haiku about Python"):
|
|
150
|
+
print(chunk.content, end="", flush=True)
|
|
151
|
+
```
|
|
152
|
+
|
|
153
|
+
### Tool Calling
|
|
154
|
+
|
|
155
|
+
```python
|
|
156
|
+
from langchain_core.tools import tool
|
|
157
|
+
|
|
158
|
+
@tool
|
|
159
|
+
def get_weather(city: str) -> str:
|
|
160
|
+
"""Get the current weather for a city."""
|
|
161
|
+
return f"Sunny, 72°F in {city}"
|
|
162
|
+
|
|
163
|
+
chat_model = get_chat_model(model_name="gpt-4o-2024-11-20", client_settings=settings)
|
|
164
|
+
model_with_tools = chat_model.bind_tools([get_weather])
|
|
165
|
+
|
|
166
|
+
response = model_with_tools.invoke("What's the weather in Tokyo?")
|
|
167
|
+
print(response.tool_calls)
|
|
168
|
+
```
|
|
169
|
+
|
|
170
|
+
### LangGraph Agents
|
|
171
|
+
|
|
172
|
+
```python
|
|
173
|
+
from langgraph.prebuilt import create_react_agent
|
|
174
|
+
from langchain_core.tools import tool
|
|
175
|
+
|
|
176
|
+
@tool
|
|
177
|
+
def search(query: str) -> str:
|
|
178
|
+
"""Search the web."""
|
|
179
|
+
return f"Results for: {query}"
|
|
180
|
+
|
|
181
|
+
chat_model = get_chat_model(model_name="gpt-4o-2024-11-20", client_settings=settings)
|
|
182
|
+
agent = create_react_agent(chat_model, [search])
|
|
183
|
+
|
|
184
|
+
result = agent.invoke({"messages": [("user", "Search for UiPath documentation")]})
|
|
185
|
+
```
|
|
186
|
+
|
|
187
|
+
### Extended Thinking (Model-Specific)
|
|
188
|
+
|
|
189
|
+
```python
|
|
190
|
+
# OpenAI o1/o3 reasoning
|
|
191
|
+
chat_model = get_chat_model(
|
|
192
|
+
model_name="o3-mini",
|
|
193
|
+
client_settings=settings,
|
|
194
|
+
client_type="normalized",
|
|
195
|
+
reasoning_effort="medium", # "low", "medium", "high"
|
|
196
|
+
)
|
|
197
|
+
|
|
198
|
+
# Anthropic Claude thinking
|
|
199
|
+
chat_model = get_chat_model(
|
|
200
|
+
model_name="claude-sonnet-4-5",
|
|
201
|
+
client_settings=settings,
|
|
202
|
+
client_type="normalized",
|
|
203
|
+
thinking={"type": "enabled", "budget_tokens": 10000},
|
|
204
|
+
)
|
|
205
|
+
|
|
206
|
+
# Gemini thinking
|
|
207
|
+
chat_model = get_chat_model(
|
|
208
|
+
model_name="gemini-2.5-pro",
|
|
209
|
+
client_settings=settings,
|
|
210
|
+
client_type="normalized",
|
|
211
|
+
thinking_level="medium",
|
|
212
|
+
include_thoughts=True,
|
|
213
|
+
)
|
|
214
|
+
```
|
|
215
|
+
|
|
216
|
+
## Configuration
|
|
217
|
+
|
|
218
|
+
### Retry Configuration
|
|
219
|
+
|
|
220
|
+
```python
|
|
221
|
+
# RetryConfig is a TypedDict - all fields are optional with sensible defaults
|
|
222
|
+
retry_config = {
|
|
223
|
+
"initial_delay": 2.0, # Initial delay before first retry
|
|
224
|
+
"max_delay": 60.0, # Maximum delay between retries
|
|
225
|
+
"exp_base": 2.0, # Exponential backoff base
|
|
226
|
+
"jitter": 1.0, # Random jitter to add
|
|
227
|
+
}
|
|
228
|
+
|
|
229
|
+
chat_model = get_chat_model(
|
|
230
|
+
model_name="gpt-4o-2024-11-20",
|
|
231
|
+
client_settings=settings,
|
|
232
|
+
max_retries=3,
|
|
233
|
+
retry_config=retry_config,
|
|
234
|
+
)
|
|
235
|
+
```
|
|
236
|
+
|
|
237
|
+
### Request Timeout
|
|
238
|
+
|
|
239
|
+
```python
|
|
240
|
+
chat_model = get_chat_model(
|
|
241
|
+
model_name="gpt-4o-2024-11-20",
|
|
242
|
+
client_settings=settings,
|
|
243
|
+
request_timeout=120, # Client-side timeout in seconds
|
|
244
|
+
)
|
|
245
|
+
```
|
|
246
|
+
|
|
247
|
+
## API Reference
|
|
248
|
+
|
|
249
|
+
### `get_chat_model()`
|
|
250
|
+
|
|
251
|
+
Factory function to create a chat model.
|
|
252
|
+
|
|
253
|
+
**Parameters:**
|
|
254
|
+
- `model_name` (str): Name of the model (e.g., "gpt-4o-2024-11-20")
|
|
255
|
+
- `client_settings` (UiPathBaseSettings): Client settings for authentication
|
|
256
|
+
- `client_type` (Literal["passthrough", "normalized"]): API mode (default: "passthrough")
|
|
257
|
+
- `**model_kwargs`: Additional arguments passed to the model constructor
|
|
258
|
+
|
|
259
|
+
**Returns:** `BaseChatModel` - A LangChain-compatible chat model
|
|
260
|
+
|
|
261
|
+
### `get_embedding_model()`
|
|
262
|
+
|
|
263
|
+
Factory function to create an embeddings model.
|
|
264
|
+
|
|
265
|
+
**Parameters:**
|
|
266
|
+
- `model` (str): Name of the model (e.g., "text-embedding-3-large")
|
|
267
|
+
- `client_settings` (UiPathBaseSettings): Client settings for authentication
|
|
268
|
+
- `client_type` (Literal["passthrough", "normalized"]): API mode (default: "passthrough")
|
|
269
|
+
- `**model_kwargs`: Additional arguments passed to the model constructor
|
|
270
|
+
|
|
271
|
+
**Returns:** `Embeddings` - A LangChain-compatible embeddings model
|
|
272
|
+
|
|
273
|
+
## See Also
|
|
274
|
+
|
|
275
|
+
- [Main README](../../README.md) - Overview and core client documentation
|
|
276
|
+
- [UiPath LLM Client](../../src/uipath_llm_client/) - Low-level HTTP client
|
|
@@ -0,0 +1,28 @@
|
|
|
1
|
+
uipath_langchain_client/__init__.py,sha256=3rOLh-mBT_DWUNShlnOzzcU9OxT98V7i_fQPcFRAGuQ,1605
|
|
2
|
+
uipath_langchain_client/__version__.py,sha256=n7mSkMnVmsou1wB3oAqSDg6W0Wlr4Jo4fhedKAjOf-o,154
|
|
3
|
+
uipath_langchain_client/base_client.py,sha256=ZOv51sfGpJLS0fQq0kUIy0v0BA_WF6jEFky9EgO7fpQ,10911
|
|
4
|
+
uipath_langchain_client/factory.py,sha256=6KKAQqtdtDS8cwsZ5MUoChiqnDye41svbZ5CeUaggYg,8312
|
|
5
|
+
uipath_langchain_client/settings.py,sha256=mcSXWzZ_jkG_y-1XJOJpDZH9YVHZd6MMbNMMHXujGjw,920
|
|
6
|
+
uipath_langchain_client/clients/anthropic/__init__.py,sha256=cU7tkL3WYQXFZhdkZsQn_sgDQRx9wWzde3Amq0ts94U,121
|
|
7
|
+
uipath_langchain_client/clients/anthropic/chat_models.py,sha256=STj3V_OUORSZ5nKX0BluLxkBDg0VL293fq2ou6hh1Hg,6947
|
|
8
|
+
uipath_langchain_client/clients/azure/__init__.py,sha256=TDeEtcanZNqqqpjPWY8vFOLmKjVIyLXyv4l8VYKlpfY,267
|
|
9
|
+
uipath_langchain_client/clients/azure/chat_models.py,sha256=BvMgDYRGwHhHLbS24aBL0Esp0NOOyf9wP3OkAWLpYEM,1727
|
|
10
|
+
uipath_langchain_client/clients/azure/embeddings.py,sha256=VNxzMNROa9ZIAXUhWsIKgZeRjXj0rNn0wWZlFyZnjZk,1628
|
|
11
|
+
uipath_langchain_client/clients/bedrock/__init__.py,sha256=TDHsmYfsnYHCl7q2UwbABRio-0CfAMP__ry4bjMDVeI,298
|
|
12
|
+
uipath_langchain_client/clients/bedrock/chat_models.py,sha256=F1LuKnwWjiZSeV1ZbsvkusWC10MtCowVJuBpO9S7luc,2149
|
|
13
|
+
uipath_langchain_client/clients/bedrock/embeddings.py,sha256=rLv2bWG46i3CJDfd7X9dB3acASQwTNfcxksZ7TRQjtg,1141
|
|
14
|
+
uipath_langchain_client/clients/bedrock/utils.py,sha256=YYWyerreQj4X2iR-Lsg1i7ELLUi9HLk63AXuzuHp4KM,3129
|
|
15
|
+
uipath_langchain_client/clients/google/__init__.py,sha256=oWzq3pprWR-si-mZt40VrQ5f53f-O7AKSWzqP60VDHE,271
|
|
16
|
+
uipath_langchain_client/clients/google/chat_models.py,sha256=OinMNzBZkmNWUcIqX1wu2EyjZmrbyCyss5H_ijKBJg0,7668
|
|
17
|
+
uipath_langchain_client/clients/google/embeddings.py,sha256=jx2TmzvcxIcKA0iitm5-iqSIj0hqy5RmLnNXMXbkXns,1705
|
|
18
|
+
uipath_langchain_client/clients/normalized/__init__.py,sha256=r0tJ3QvhZPcbNjZJz8ADVCVTcyV1nb0zluwhb-tAAMg,257
|
|
19
|
+
uipath_langchain_client/clients/normalized/chat_models.py,sha256=kIs8kyW5OKWiNWnxn7o8hz6W-yizaFVvd3V7KZAC8Sw,16639
|
|
20
|
+
uipath_langchain_client/clients/normalized/embeddings.py,sha256=CXIuqKZKSa663b0dOubAvp1zHmW6-CgM1M6i7-FcgTA,1275
|
|
21
|
+
uipath_langchain_client/clients/openai/__init__.py,sha256=f6O7otwFkSc5CxVOHpZ1GHEW9g9K2rDg5P_R7cxTrXs,376
|
|
22
|
+
uipath_langchain_client/clients/openai/chat_models.py,sha256=fAaDeR5bQ6FFgufP0Qq8Hs3f-1QKHvLNvghGw18pGzQ,4029
|
|
23
|
+
uipath_langchain_client/clients/openai/embeddings.py,sha256=qMuXmuv2G4WWtEeOGqEpz27DnEPKED3e3M4LOGGTwgw,3226
|
|
24
|
+
uipath_langchain_client/clients/vertexai/__init__.py,sha256=g_rPq14ij0DPxUC4tX7vNDB5KVvfULxOUXyReM97PZw,132
|
|
25
|
+
uipath_langchain_client/clients/vertexai/chat_models.py,sha256=K1yG_uXnjryFz6tg4UZepNnRVB6IQd6zhdDo4Sp9ubQ,1880
|
|
26
|
+
uipath_langchain_client-1.0.0.dist-info/METADATA,sha256=OD-1MZCwNjYriN3U4RwM0S4nG-ER6-fxQSvT7wGpwX4,8762
|
|
27
|
+
uipath_langchain_client-1.0.0.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
|
|
28
|
+
uipath_langchain_client-1.0.0.dist-info/RECORD,,
|