uipath-langchain-client 1.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (28) hide show
  1. uipath_langchain_client/__init__.py +50 -0
  2. uipath_langchain_client/__version__.py +3 -0
  3. uipath_langchain_client/base_client.py +277 -0
  4. uipath_langchain_client/clients/anthropic/__init__.py +3 -0
  5. uipath_langchain_client/clients/anthropic/chat_models.py +157 -0
  6. uipath_langchain_client/clients/azure/__init__.py +4 -0
  7. uipath_langchain_client/clients/azure/chat_models.py +46 -0
  8. uipath_langchain_client/clients/azure/embeddings.py +46 -0
  9. uipath_langchain_client/clients/bedrock/__init__.py +7 -0
  10. uipath_langchain_client/clients/bedrock/chat_models.py +63 -0
  11. uipath_langchain_client/clients/bedrock/embeddings.py +33 -0
  12. uipath_langchain_client/clients/bedrock/utils.py +90 -0
  13. uipath_langchain_client/clients/google/__init__.py +4 -0
  14. uipath_langchain_client/clients/google/chat_models.py +203 -0
  15. uipath_langchain_client/clients/google/embeddings.py +45 -0
  16. uipath_langchain_client/clients/normalized/__init__.py +4 -0
  17. uipath_langchain_client/clients/normalized/chat_models.py +419 -0
  18. uipath_langchain_client/clients/normalized/embeddings.py +31 -0
  19. uipath_langchain_client/clients/openai/__init__.py +15 -0
  20. uipath_langchain_client/clients/openai/chat_models.py +102 -0
  21. uipath_langchain_client/clients/openai/embeddings.py +82 -0
  22. uipath_langchain_client/clients/vertexai/__init__.py +3 -0
  23. uipath_langchain_client/clients/vertexai/chat_models.py +48 -0
  24. uipath_langchain_client/factory.py +217 -0
  25. uipath_langchain_client/settings.py +32 -0
  26. uipath_langchain_client-1.0.0.dist-info/METADATA +276 -0
  27. uipath_langchain_client-1.0.0.dist-info/RECORD +28 -0
  28. uipath_langchain_client-1.0.0.dist-info/WHEEL +4 -0
@@ -0,0 +1,50 @@
1
+ """
2
+ UiPath LangChain Client
3
+
4
+ LangChain-compatible chat models and embeddings for accessing LLMs through
5
+ UiPath's infrastructure (AgentHub or LLM Gateway).
6
+
7
+ Quick Start:
8
+ >>> from uipath_langchain_client import (
9
+ ... get_chat_model,
10
+ ... get_embedding_model,
11
+ ... get_default_client_settings,
12
+ ... )
13
+ >>>
14
+ >>> # Get settings (auto-detects backend from environment)
15
+ >>> settings = get_default_client_settings()
16
+ >>>
17
+ >>> # Chat model with auto-detected vendor
18
+ >>> chat = get_chat_model("gpt-4o-2024-11-20", settings)
19
+ >>> response = chat.invoke("Hello!")
20
+ >>>
21
+ >>> # Embeddings model
22
+ >>> embeddings = get_embedding_model("text-embedding-3-large", settings)
23
+ >>> vectors = embeddings.embed_documents(["Hello world"])
24
+
25
+ Settings:
26
+ - get_default_client_settings(): Auto-detect backend from environment
27
+ - AgentHubSettings: UiPath AgentHub backend (CLI-based auth)
28
+ - LLMGatewaySettings: UiPath LLM Gateway backend (S2S auth)
29
+
30
+ Factory Functions:
31
+ - get_chat_model(): Create a chat model with auto-detected vendor
32
+ - get_embedding_model(): Create an embeddings model with auto-detected vendor
33
+ """
34
+
35
+ from uipath_langchain_client.__version__ import __version__
36
+ from uipath_langchain_client.factory import get_chat_model, get_embedding_model
37
+ from uipath_langchain_client.settings import (
38
+ AgentHubSettings,
39
+ LLMGatewaySettings,
40
+ get_default_client_settings,
41
+ )
42
+
43
+ __all__ = [
44
+ "__version__",
45
+ "get_chat_model",
46
+ "get_embedding_model",
47
+ "get_default_client_settings",
48
+ "LLMGatewaySettings",
49
+ "AgentHubSettings",
50
+ ]
@@ -0,0 +1,3 @@
1
+ __title__ = "UiPath LangChain Client"
2
+ __description__ = "A Python client for interacting with UiPath's LLM services via LangChain."
3
+ __version__ = "1.0.0"
@@ -0,0 +1,277 @@
1
+ """
2
+ UiPath LLM Client - Core HTTP Client Module
3
+
4
+ This module provides the base HTTP client for interacting with UiPath's LLM services.
5
+ It handles authentication, request/response formatting, retry logic, and logging.
6
+
7
+ The UiPathBaseLLMClient class is designed to be used as a mixin with framework-specific
8
+ chat models (e.g., LangChain, LlamaIndex) to provide UiPath connectivity.
9
+
10
+ Example:
11
+ >>> from uipath_llm_client import UiPathBaseLLMClient, UiPathAPIConfig
12
+ >>> from uipath_langchain_client.settings import get_default_client_settings
13
+ >>>
14
+ >>> client = UiPathBaseLLMClient(
15
+ ... model="gpt-4o-2024-11-20",
16
+ ... api_config=UiPathAPIConfig(
17
+ ... api_type="completions",
18
+ ... client_type="passthrough",
19
+ ... vendor_type="openai",
20
+ ... ),
21
+ ... client_settings=get_default_client_settings(),
22
+ ... )
23
+ >>> response = client.uipath_request(request_body={"messages": [...]})
24
+ """
25
+
26
+ import logging
27
+ from collections.abc import AsyncIterator, Iterator, Mapping
28
+ from functools import cached_property
29
+ from typing import Any, Literal
30
+
31
+ from httpx import URL, Response
32
+ from pydantic import BaseModel, ConfigDict, Field
33
+
34
+ from uipath_langchain_client.settings import (
35
+ UiPathAPIConfig,
36
+ UiPathBaseSettings,
37
+ get_default_client_settings,
38
+ )
39
+ from uipath_llm_client.httpx_client import UiPathHttpxAsyncClient, UiPathHttpxClient
40
+ from uipath_llm_client.utils.retry import RetryConfig
41
+
42
+
43
+ class UiPathBaseLLMClient(BaseModel):
44
+ """Base HTTP client for interacting with UiPath's LLM services.
45
+
46
+ Provides the underlying HTTP transport layer with support for:
47
+ - Authentication and token management
48
+ - Request URL and header formatting
49
+ - Retry logic with configurable backoff
50
+ - Request/response logging
51
+
52
+ This class is typically used as a mixin with framework-specific chat models
53
+ (e.g., LangChain, LlamaIndex) to provide UiPath connectivity.
54
+
55
+ Attributes:
56
+ model_name: Name of the LLM model to use (aliased as "model")
57
+ byo_connection_id: Optional connection ID for Bring Your Own (BYO) models enrolled
58
+ in LLMGateway. When provided, routes requests to your custom-enrolled model.
59
+ api_config: API configuration (api_type, client_type, vendor_type, etc.)
60
+ client_settings: Client configuration (base URL, auth headers, etc.)
61
+ default_headers: Additional headers to include in requests
62
+ request_timeout: Client-side request timeout in seconds
63
+ retry_config: Configuration for retry behavior on failed requests
64
+ logger: Logger instance for request/response logging
65
+ """
66
+
67
+ model_config = ConfigDict(
68
+ arbitrary_types_allowed=True,
69
+ validate_by_alias=True,
70
+ validate_by_name=True,
71
+ validate_default=True,
72
+ )
73
+
74
+ model_name: str = Field(
75
+ alias="model", description="the LLM model name (completions or embeddings)"
76
+ )
77
+ byo_connection_id: str | None = Field(
78
+ default=None,
79
+ description="Bring Your Own (BYO) connection ID for custom models enrolled in LLMGateway. "
80
+ "Use this when you have enrolled your own model deployment and received a connection ID.",
81
+ )
82
+
83
+ api_config: UiPathAPIConfig = Field(
84
+ ...,
85
+ description="Settings for the UiPath API",
86
+ )
87
+ client_settings: UiPathBaseSettings = Field(
88
+ default_factory=get_default_client_settings,
89
+ description="Settings for the UiPath client (defaults based on UIPATH_LLM_BACKEND env var)",
90
+ )
91
+ default_headers: Mapping[str, str] | None = Field(
92
+ default={
93
+ "X-UiPath-LLMGateway-TimeoutSeconds": "30", # server side timeout, default is 10, maximum is 300
94
+ "X-UiPath-LLMGateway-AllowFull4xxResponse": "true", # allow full 4xx responses (default is false)
95
+ },
96
+ description="Default request headers to include in requests",
97
+ )
98
+
99
+ request_timeout: int | None = Field(
100
+ default=None,
101
+ description="Client-side request timeout in seconds",
102
+ )
103
+ max_retries: int = Field(
104
+ default=1,
105
+ description="Maximum number of retries for failed requests",
106
+ )
107
+ retry_config: RetryConfig | None = Field(
108
+ default=None,
109
+ description="Retry configuration for failed requests",
110
+ )
111
+ logger: logging.Logger | None = Field(
112
+ default=None,
113
+ description="Logger for request/response logging",
114
+ )
115
+
116
+ @cached_property
117
+ def uipath_sync_client(self) -> UiPathHttpxClient:
118
+ """Here we instantiate a synchronous HTTP client with the proper authentication pipeline, retry logic, logging etc."""
119
+ return UiPathHttpxClient(
120
+ model_name=self.model_name,
121
+ byo_connection_id=self.byo_connection_id,
122
+ api_config=self.api_config,
123
+ auth=self.client_settings.build_auth_pipeline(),
124
+ base_url=self.client_settings.build_base_url(
125
+ model_name=self.model_name, api_config=self.api_config
126
+ ),
127
+ headers={
128
+ **(self.default_headers or {}),
129
+ **self.client_settings.build_auth_headers(
130
+ model_name=self.model_name, api_config=self.api_config
131
+ ),
132
+ },
133
+ timeout=self.request_timeout,
134
+ max_retries=self.max_retries,
135
+ retry_config=self.retry_config,
136
+ logger=self.logger,
137
+ )
138
+
139
+ @cached_property
140
+ def uipath_async_client(self) -> UiPathHttpxAsyncClient:
141
+ """Here we instantiate an asynchronous HTTP client with the proper authentication pipeline, retry logic, logging etc."""
142
+ return UiPathHttpxAsyncClient(
143
+ model_name=self.model_name,
144
+ byo_connection_id=self.byo_connection_id,
145
+ api_config=self.api_config,
146
+ auth=self.client_settings.build_auth_pipeline(),
147
+ base_url=self.client_settings.build_base_url(
148
+ model_name=self.model_name, api_config=self.api_config
149
+ ),
150
+ headers={
151
+ **(self.default_headers or {}),
152
+ **self.client_settings.build_auth_headers(
153
+ model_name=self.model_name, api_config=self.api_config
154
+ ),
155
+ },
156
+ timeout=self.request_timeout,
157
+ max_retries=self.max_retries,
158
+ retry_config=self.retry_config,
159
+ logger=self.logger,
160
+ )
161
+
162
+ def uipath_request(
163
+ self,
164
+ method: str = "POST",
165
+ url: URL | str = "/",
166
+ *,
167
+ request_body: dict[str, Any] | None = None,
168
+ **kwargs: Any,
169
+ ) -> Response:
170
+ """Make a synchronous HTTP request to the UiPath API.
171
+
172
+ Args:
173
+ method: HTTP method (GET, POST, etc.). Defaults to "POST".
174
+ url: Request URL path. Defaults to "/".
175
+ request_body: JSON request body to send.
176
+ **kwargs: Additional arguments passed to httpx.Client.request().
177
+
178
+ Returns:
179
+ httpx.Response: The HTTP response from the API.
180
+
181
+ Raises:
182
+ UiPathAPIError: On HTTP 4xx/5xx responses (raised by transport layer).
183
+ """
184
+ return self.uipath_sync_client.request(method, url, json=request_body, **kwargs)
185
+
186
+ async def uipath_arequest(
187
+ self,
188
+ method: Literal["POST", "GET"] = "POST",
189
+ url: str = "/",
190
+ *,
191
+ request_body: dict[str, Any] | None = None,
192
+ **kwargs: Any,
193
+ ) -> Response:
194
+ """Make an asynchronous HTTP request to the UiPath API."""
195
+ return await self.uipath_async_client.request(method, url, json=request_body, **kwargs)
196
+
197
+ def uipath_stream(
198
+ self,
199
+ method: Literal["POST", "GET"] = "POST",
200
+ url: str = "/",
201
+ *,
202
+ request_body: dict[str, Any] | None = None,
203
+ stream_type: Literal["text", "bytes", "lines", "raw"] = "lines",
204
+ **kwargs: Any,
205
+ ) -> Iterator[str | bytes]:
206
+ """Make a synchronous streaming HTTP request to the UiPath API.
207
+
208
+ Args:
209
+ method: HTTP method (POST or GET). Defaults to "POST".
210
+ url: Request URL path. Defaults to "/".
211
+ request_body: JSON request body to send.
212
+ stream_type: Type of stream iteration:
213
+ - "text": Yield decoded text chunks
214
+ - "bytes": Yield raw byte chunks
215
+ - "lines": Yield complete lines (default, best for SSE)
216
+ - "raw": Yield raw response data
217
+ **kwargs: Additional arguments passed to httpx.Client.stream().
218
+
219
+ Yields:
220
+ str | bytes: Chunks of the streaming response.
221
+ """
222
+ with self.uipath_sync_client.stream(method, url, json=request_body, **kwargs) as response:
223
+ match stream_type:
224
+ case "text":
225
+ for chunk in response.iter_text():
226
+ yield chunk
227
+ case "bytes":
228
+ for chunk in response.iter_bytes():
229
+ yield chunk
230
+ case "lines":
231
+ for chunk in response.iter_lines():
232
+ yield chunk
233
+ case "raw":
234
+ for chunk in response.iter_raw():
235
+ yield chunk
236
+
237
+ async def uipath_astream(
238
+ self,
239
+ method: Literal["POST", "GET"] = "POST",
240
+ url: str = "/",
241
+ *,
242
+ request_body: dict[str, Any] | None = None,
243
+ stream_type: Literal["text", "bytes", "lines", "raw"] = "lines",
244
+ **kwargs: Any,
245
+ ) -> AsyncIterator[str | bytes]:
246
+ """Make an asynchronous streaming HTTP request to the UiPath API.
247
+
248
+ Args:
249
+ method: HTTP method (POST or GET). Defaults to "POST".
250
+ url: Request URL path. Defaults to "/".
251
+ request_body: JSON request body to send.
252
+ stream_type: Type of stream iteration:
253
+ - "text": Yield decoded text chunks
254
+ - "bytes": Yield raw byte chunks
255
+ - "lines": Yield complete lines (default, best for SSE)
256
+ - "raw": Yield raw response data
257
+ **kwargs: Additional arguments passed to httpx.AsyncClient.stream().
258
+
259
+ Yields:
260
+ str | bytes: Chunks of the streaming response.
261
+ """
262
+ async with self.uipath_async_client.stream(
263
+ method, url, json=request_body, **kwargs
264
+ ) as response:
265
+ match stream_type:
266
+ case "text":
267
+ async for chunk in response.aiter_text():
268
+ yield chunk
269
+ case "bytes":
270
+ async for chunk in response.aiter_bytes():
271
+ yield chunk
272
+ case "lines":
273
+ async for chunk in response.aiter_lines():
274
+ yield chunk
275
+ case "raw":
276
+ async for chunk in response.aiter_raw():
277
+ yield chunk
@@ -0,0 +1,3 @@
1
# Public re-export: the Anthropic-backed chat model client.
from uipath_langchain_client.clients.anthropic.chat_models import UiPathChatAnthropic

__all__ = ["UiPathChatAnthropic"]
@@ -0,0 +1,157 @@
1
+ from functools import cached_property
2
+ from typing import Any, Literal, Self, override
3
+
4
+ from pydantic import Field, model_validator
5
+ from uipath_langchain_client.base_client import UiPathBaseLLMClient
6
+ from uipath_langchain_client.settings import UiPathAPIConfig
7
+
8
+ try:
9
+ from langchain_anthropic.chat_models import ChatAnthropic
10
+
11
+ from anthropic import (
12
+ Anthropic,
13
+ AnthropicBedrock,
14
+ AnthropicFoundry,
15
+ AnthropicVertex,
16
+ AsyncAnthropic,
17
+ AsyncAnthropicBedrock,
18
+ AsyncAnthropicFoundry,
19
+ AsyncAnthropicVertex,
20
+ )
21
+ except ImportError as e:
22
+ raise ImportError(
23
+ "The 'anthropic' extra is required to use UiPathChatAnthropic. "
24
+ "Install it with: uv add uipath-langchain-client[anthropic]"
25
+ ) from e
26
+
27
+
28
+ class UiPathChatAnthropic(UiPathBaseLLMClient, ChatAnthropic):
29
+ api_config: UiPathAPIConfig = UiPathAPIConfig(
30
+ api_type="completions",
31
+ client_type="passthrough",
32
+ vendor_type="anthropic",
33
+ freeze_base_url=True,
34
+ )
35
+ vendor_type: Literal["anthropic", "azure", "vertexai", "awsbedrock"] = "awsbedrock"
36
+
37
+ @model_validator(mode="after")
38
+ def setup_api_flavor_and_version(self) -> Self:
39
+ match self.vendor_type:
40
+ case "vertexai":
41
+ self.api_config.api_flavor = "anthropic-claude"
42
+ self.api_config.api_version = "v1beta1"
43
+ case "awsbedrock":
44
+ self.api_config.api_flavor = "invoke"
45
+ case _:
46
+ raise ValueError("Those vendors are currently not supported")
47
+ self.api_config.vendor_type = self.vendor_type
48
+ return self
49
+
50
+ # Override fields to avoid typing issues and fix stuff
51
+ stop_sequences: list[str] | None = Field(default=None, alias="stop")
52
+ model: str = Field(default="", alias="model_name")
53
+ default_request_timeout: float | None = None
54
+
55
+ @cached_property
56
+ def _anthropic_client(
57
+ self,
58
+ ) -> Anthropic | AnthropicVertex | AnthropicBedrock | AnthropicFoundry:
59
+ match self.vendor_type:
60
+ case "azure":
61
+ return AnthropicFoundry(
62
+ api_key="PLACEHOLDER",
63
+ base_url=str(self.uipath_sync_client.base_url),
64
+ default_headers=dict(self.uipath_sync_client.headers),
65
+ max_retries=1, # handled by the UiPathBaseLLMClient
66
+ timeout=None, # handled by the UiPathBaseLLMClient
67
+ http_client=self.uipath_sync_client,
68
+ )
69
+ case "vertexai":
70
+ return AnthropicVertex(
71
+ region="PLACEHOLDER",
72
+ project_id="PLACEHOLDER",
73
+ access_token="PLACEHOLDER",
74
+ base_url=str(self.uipath_sync_client.base_url),
75
+ default_headers=dict(self.uipath_sync_client.headers),
76
+ timeout=None, # handled by the UiPathBaseLLMClient
77
+ max_retries=1, # handled by the UiPathBaseLLMClient
78
+ http_client=self.uipath_sync_client,
79
+ )
80
+ case "awsbedrock":
81
+ return AnthropicBedrock(
82
+ aws_access_key="PLACEHOLDER",
83
+ aws_secret_key="PLACEHOLDER",
84
+ aws_region="PLACEHOLDER",
85
+ base_url=str(self.uipath_sync_client.base_url),
86
+ default_headers=dict(self.uipath_sync_client.headers),
87
+ timeout=None, # handled by the UiPathBaseLLMClient
88
+ max_retries=1, # handled by the UiPathBaseLLMClient
89
+ http_client=self.uipath_sync_client,
90
+ )
91
+ case "anthropic":
92
+ return Anthropic(
93
+ api_key="PLACEHOLDER",
94
+ base_url=str(self.uipath_sync_client.base_url),
95
+ default_headers=dict(self.uipath_sync_client.headers),
96
+ timeout=None, # handled by the UiPathBaseLLMClient
97
+ max_retries=1, # handled by the UiPathBaseLLMClient
98
+ http_client=self.uipath_sync_client,
99
+ )
100
+
101
+ @cached_property
102
+ def _async_anthropic_client(
103
+ self,
104
+ ) -> AsyncAnthropic | AsyncAnthropicVertex | AsyncAnthropicBedrock | AsyncAnthropicFoundry:
105
+ match self.vendor_type:
106
+ case "azure":
107
+ return AsyncAnthropicFoundry(
108
+ api_key="PLACEHOLDER",
109
+ base_url=str(self.uipath_async_client.base_url),
110
+ default_headers=dict(self.uipath_async_client.headers),
111
+ max_retries=1, # handled by the UiPathBaseLLMClient
112
+ timeout=None, # handled by the UiPathBaseLLMClient
113
+ http_client=self.uipath_async_client,
114
+ )
115
+ case "vertexai":
116
+ return AsyncAnthropicVertex(
117
+ region="PLACEHOLDER",
118
+ project_id="PLACEHOLDER",
119
+ access_token="PLACEHOLDER",
120
+ base_url=str(self.uipath_async_client.base_url),
121
+ default_headers=dict(self.uipath_async_client.headers),
122
+ timeout=None, # handled by the UiPathBaseLLMClient
123
+ max_retries=1, # handled by the UiPathBaseLLMClient
124
+ http_client=self.uipath_async_client,
125
+ )
126
+ case "awsbedrock":
127
+ return AsyncAnthropicBedrock(
128
+ aws_access_key="PLACEHOLDER",
129
+ aws_secret_key="PLACEHOLDER",
130
+ aws_region="PLACEHOLDER",
131
+ base_url=str(self.uipath_async_client.base_url),
132
+ default_headers=dict(self.uipath_async_client.headers),
133
+ timeout=None, # handled by the UiPathBaseLLMClient
134
+ max_retries=1, # handled by the UiPathBaseLLMClient
135
+ http_client=self.uipath_async_client,
136
+ )
137
+ case _:
138
+ return AsyncAnthropic(
139
+ api_key="PLACEHOLDER",
140
+ base_url=str(self.uipath_async_client.base_url),
141
+ default_headers=dict(self.uipath_async_client.headers),
142
+ timeout=None, # handled by the UiPathBaseLLMClient
143
+ max_retries=1, # handled by the UiPathBaseLLMClient
144
+ http_client=self.uipath_async_client,
145
+ )
146
+
147
+ @override
148
+ def _create(self, payload: dict) -> Any:
149
+ if "betas" in payload:
150
+ return self._anthropic_client.beta.messages.create(**payload)
151
+ return self._anthropic_client.messages.create(**payload)
152
+
153
+ @override
154
+ async def _acreate(self, payload: dict) -> Any:
155
+ if "betas" in payload:
156
+ return await self._async_anthropic_client.beta.messages.create(**payload)
157
+ return await self._async_anthropic_client.messages.create(**payload)
@@ -0,0 +1,4 @@
1
# Public re-exports: Azure AI chat and embeddings clients routed through UiPath.
from uipath_langchain_client.clients.azure.chat_models import UiPathAzureAIChatCompletionsModel
from uipath_langchain_client.clients.azure.embeddings import UiPathAzureAIEmbeddingsModel

__all__ = ["UiPathAzureAIChatCompletionsModel", "UiPathAzureAIEmbeddingsModel"]
@@ -0,0 +1,46 @@
1
+ from typing import Self
2
+
3
+ from pydantic import model_validator
4
+ from uipath_langchain_client.base_client import UiPathBaseLLMClient
5
+ from uipath_langchain_client.settings import UiPathAPIConfig
6
+
7
+ try:
8
+ from langchain_azure_ai.chat_models import AzureAIChatCompletionsModel
9
+
10
+ from azure.ai.inference import ChatCompletionsClient
11
+ from azure.ai.inference.aio import ChatCompletionsClient as ChatCompletionsClientAsync
12
+ from azure.core.credentials import AzureKeyCredential
13
+ except ImportError as e:
14
+ raise ImportError(
15
+ "The 'azure' extra is required to use UiPathAzureAIChatCompletionsModel. "
16
+ "Install it with: uv add uipath-langchain-client[azure]"
17
+ ) from e
18
+
19
+
20
class UiPathAzureAIChatCompletionsModel(UiPathBaseLLMClient, AzureAIChatCompletionsModel):  # type: ignore[override]
    """Azure AI chat-completions model targeting UiPath's gateway (work in progress).

    The validator below builds the sync/async Azure SDK clients with placeholder
    endpoint/credential values; they are not yet wired to the UiPath httpx
    transport (see TODO).
    """

    api_config: UiPathAPIConfig = UiPathAPIConfig(
        api_type="completions",
        client_type="passthrough",
        vendor_type="azure",
        freeze_base_url=True,
    )

    # Override fields to avoid errors when instantiating the class
    endpoint: str | None = "PLACEHOLDER"

    @model_validator(mode="after")
    def setup_uipath_client(self) -> Self:
        # TODO: finish implementation once we have a proper model in UiPath API
        # NOTE(review): endpoint and credential are placeholders; `client_kwargs`
        # presumably comes from the langchain_azure_ai base class — confirm its
        # contents before relying on this path.
        self._client = ChatCompletionsClient(
            endpoint="PLACEHOLDER",
            credential=AzureKeyCredential("PLACEHOLDER"),
            model=self.model_name,
            **self.client_kwargs,
        )
        self._async_client = ChatCompletionsClientAsync(
            endpoint="PLACEHOLDER",
            credential=AzureKeyCredential("PLACEHOLDER"),
            model=self.model_name,
            **self.client_kwargs,
        )
        return self
@@ -0,0 +1,46 @@
1
from typing import Self

from pydantic import model_validator
from uipath_langchain_client.base_client import UiPathBaseLLMClient
from uipath_langchain_client.settings import UiPathAPIConfig

try:
    from langchain_azure_ai.embeddings import AzureAIEmbeddingsModel

    from azure.ai.inference import EmbeddingsClient
    from azure.ai.inference.aio import EmbeddingsClient as EmbeddingsClientAsync
    from azure.core.credentials import AzureKeyCredential
except ImportError as e:
    raise ImportError(
        "The 'azure' extra is required to use UiPathAzureAIEmbeddingsModel. "
        "Install it with: uv add uipath-langchain-client[azure]"
    ) from e


class UiPathAzureAIEmbeddingsModel(UiPathBaseLLMClient, AzureAIEmbeddingsModel):  # type: ignore[override]
    """Azure AI embeddings model targeting UiPath's gateway (work in progress).

    The validator below builds the sync/async Azure SDK clients with placeholder
    endpoint/credential values; they are not yet wired to the UiPath httpx
    transport (see TODO).
    """

    api_config: UiPathAPIConfig = UiPathAPIConfig(
        api_type="embeddings",
        client_type="passthrough",
        vendor_type="azure",
        freeze_base_url=True,
    )

    # Override fields to avoid errors when instantiating the class
    endpoint: str | None = "PLACEHOLDER"
    credentials: str | None = "PLACEHOLDER"

    @model_validator(mode="after")
    def setup_uipath_client(self) -> Self:
        # TODO: finish implementation once we have a proper model in UiPath API
        # Fix: azure.ai.inference clients take `credential=` (an AzureKeyCredential),
        # not a bare `credentials=` string — mirrors the chat-model sibling module.
        self._client = EmbeddingsClient(
            endpoint="PLACEHOLDER",
            credential=AzureKeyCredential("PLACEHOLDER"),
            model=self.model_name,
            **self.client_kwargs,
        )
        self._async_client = EmbeddingsClientAsync(
            endpoint="PLACEHOLDER",
            credential=AzureKeyCredential("PLACEHOLDER"),
            model=self.model_name,
            **self.client_kwargs,
        )
        return self
@@ -0,0 +1,7 @@
1
# Public re-exports: AWS Bedrock chat (invoke + converse) and embeddings clients.
from uipath_langchain_client.clients.bedrock.chat_models import (
    UiPathChatBedrock,
    UiPathChatBedrockConverse,
)
from uipath_langchain_client.clients.bedrock.embeddings import UiPathBedrockEmbeddings

__all__ = ["UiPathChatBedrock", "UiPathChatBedrockConverse", "UiPathBedrockEmbeddings"]
@@ -0,0 +1,63 @@
1
+ from typing import Any, Self
2
+
3
+ from pydantic import model_validator
4
+ from uipath_langchain_client.base_client import UiPathBaseLLMClient
5
+ from uipath_langchain_client.settings import UiPathAPIConfig
6
+
7
+ try:
8
+ from langchain_aws.chat_models import ChatBedrock, ChatBedrockConverse
9
+ from uipath_langchain_client.clients.bedrock.utils import WrappedBotoClient
10
+ except ImportError as e:
11
+ raise ImportError(
12
+ "The 'aws' extra is required to use UiPathBedrockChatModel and UiPathBedrockChatModelConverse. "
13
+ "Install it with: uv add uipath-langchain-client[aws]"
14
+ ) from e
15
+
16
+
17
class UiPathChatBedrockConverse(UiPathBaseLLMClient, ChatBedrockConverse):
    """LangChain ``ChatBedrockConverse`` whose boto client is replaced by a
    UiPath-backed shim (``WrappedBotoClient``), so no AWS credentials are needed
    locally and requests go through the UiPath transport (converse API flavor).
    """

    api_config: UiPathAPIConfig = UiPathAPIConfig(
        api_type="completions",
        client_type="passthrough",
        vendor_type="awsbedrock",
        api_flavor="converse",
        freeze_base_url=True,
    )

    # Override fields to avoid errors when instantiating the class
    # (placeholders; model_id and client are rebound in the validator below).
    model_id: str = "PLACEHOLDER"
    client: Any = WrappedBotoClient()
    # NOTE(review): bedrock_client keeps its argument-less default and is never
    # rebound to the UiPath client below — confirm that is intentional.
    bedrock_client: Any = WrappedBotoClient()

    @model_validator(mode="after")
    def setup_uipath_client(self) -> Self:
        # Mirror the aliased model name and swap in the UiPath-backed boto shim.
        self.model_id = self.model_name
        self.client = WrappedBotoClient(self.uipath_sync_client)
        return self
36
+
37
+
38
class UiPathChatBedrock(UiPathBaseLLMClient, ChatBedrock):
    """LangChain ``ChatBedrock`` (invoke API flavor) whose boto client is replaced
    by a UiPath-backed shim (``WrappedBotoClient``), so no AWS credentials are
    needed locally.
    """

    api_config: UiPathAPIConfig = UiPathAPIConfig(
        api_type="completions",
        client_type="passthrough",
        vendor_type="awsbedrock",
        api_flavor="invoke",
        freeze_base_url=True,
    )

    # Override fields to avoid errors when instantiating the class
    # (placeholders; model_id and client are rebound in the validator below).
    model_id: str = "PLACEHOLDER"
    client: Any = WrappedBotoClient()
    # NOTE(review): bedrock_client keeps its argument-less default and is never
    # rebound to the UiPath client below — confirm that is intentional.
    bedrock_client: Any = WrappedBotoClient()

    @model_validator(mode="after")
    def setup_uipath_client(self) -> Self:
        # Mirror the aliased model name and swap in the UiPath-backed boto shim.
        self.model_id = self.model_name
        self.client = WrappedBotoClient(self.uipath_sync_client)
        return self

    @property
    def _as_converse(self) -> UiPathChatBedrockConverse:
        """Converse-flavored twin of this model.

        NOTE(review): only model name and client settings are forwarded; other
        generation parameters (e.g. temperature) are not propagated — confirm
        whether they should be.
        """
        return UiPathChatBedrockConverse(
            model=self.model_name,
            client_settings=self.client_settings,
        )