uipath-langchain-client 1.0.3__tar.gz → 1.0.7__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (31) hide show
  1. {uipath_langchain_client-1.0.3 → uipath_langchain_client-1.0.7}/CHANGELOG.md +21 -0
  2. {uipath_langchain_client-1.0.3 → uipath_langchain_client-1.0.7}/PKG-INFO +7 -7
  3. {uipath_langchain_client-1.0.3 → uipath_langchain_client-1.0.7}/README.md +5 -5
  4. {uipath_langchain_client-1.0.3 → uipath_langchain_client-1.0.7}/demo.py +2 -2
  5. {uipath_langchain_client-1.0.3 → uipath_langchain_client-1.0.7}/pyproject.toml +1 -1
  6. {uipath_langchain_client-1.0.3 → uipath_langchain_client-1.0.7}/src/uipath_langchain_client/__init__.py +2 -2
  7. {uipath_langchain_client-1.0.3 → uipath_langchain_client-1.0.7}/src/uipath_langchain_client/__version__.py +1 -1
  8. {uipath_langchain_client-1.0.3 → uipath_langchain_client-1.0.7}/src/uipath_langchain_client/base_client.py +7 -4
  9. {uipath_langchain_client-1.0.3 → uipath_langchain_client-1.0.7}/src/uipath_langchain_client/clients/anthropic/chat_models.py +8 -8
  10. {uipath_langchain_client-1.0.3 → uipath_langchain_client-1.0.7}/src/uipath_langchain_client/clients/bedrock/chat_models.py +1 -1
  11. {uipath_langchain_client-1.0.3 → uipath_langchain_client-1.0.7}/src/uipath_langchain_client/clients/normalized/chat_models.py +2 -2
  12. {uipath_langchain_client-1.0.3 → uipath_langchain_client-1.0.7}/src/uipath_langchain_client/clients/openai/chat_models.py +4 -4
  13. {uipath_langchain_client-1.0.3 → uipath_langchain_client-1.0.7}/src/uipath_langchain_client/clients/openai/embeddings.py +4 -4
  14. {uipath_langchain_client-1.0.3 → uipath_langchain_client-1.0.7}/src/uipath_langchain_client/clients/vertexai/chat_models.py +2 -2
  15. {uipath_langchain_client-1.0.3 → uipath_langchain_client-1.0.7}/src/uipath_langchain_client/factory.py +122 -43
  16. {uipath_langchain_client-1.0.3 → uipath_langchain_client-1.0.7}/.gitignore +0 -0
  17. {uipath_langchain_client-1.0.3 → uipath_langchain_client-1.0.7}/src/uipath_langchain_client/clients/anthropic/__init__.py +0 -0
  18. {uipath_langchain_client-1.0.3 → uipath_langchain_client-1.0.7}/src/uipath_langchain_client/clients/azure/__init__.py +0 -0
  19. {uipath_langchain_client-1.0.3 → uipath_langchain_client-1.0.7}/src/uipath_langchain_client/clients/azure/chat_models.py +0 -0
  20. {uipath_langchain_client-1.0.3 → uipath_langchain_client-1.0.7}/src/uipath_langchain_client/clients/azure/embeddings.py +0 -0
  21. {uipath_langchain_client-1.0.3 → uipath_langchain_client-1.0.7}/src/uipath_langchain_client/clients/bedrock/__init__.py +0 -0
  22. {uipath_langchain_client-1.0.3 → uipath_langchain_client-1.0.7}/src/uipath_langchain_client/clients/bedrock/embeddings.py +0 -0
  23. {uipath_langchain_client-1.0.3 → uipath_langchain_client-1.0.7}/src/uipath_langchain_client/clients/bedrock/utils.py +0 -0
  24. {uipath_langchain_client-1.0.3 → uipath_langchain_client-1.0.7}/src/uipath_langchain_client/clients/google/__init__.py +0 -0
  25. {uipath_langchain_client-1.0.3 → uipath_langchain_client-1.0.7}/src/uipath_langchain_client/clients/google/chat_models.py +0 -0
  26. {uipath_langchain_client-1.0.3 → uipath_langchain_client-1.0.7}/src/uipath_langchain_client/clients/google/embeddings.py +0 -0
  27. {uipath_langchain_client-1.0.3 → uipath_langchain_client-1.0.7}/src/uipath_langchain_client/clients/normalized/__init__.py +0 -0
  28. {uipath_langchain_client-1.0.3 → uipath_langchain_client-1.0.7}/src/uipath_langchain_client/clients/normalized/embeddings.py +0 -0
  29. {uipath_langchain_client-1.0.3 → uipath_langchain_client-1.0.7}/src/uipath_langchain_client/clients/openai/__init__.py +0 -0
  30. {uipath_langchain_client-1.0.3 → uipath_langchain_client-1.0.7}/src/uipath_langchain_client/clients/vertexai/__init__.py +0 -0
  31. {uipath_langchain_client-1.0.3 → uipath_langchain_client-1.0.7}/src/uipath_langchain_client/settings.py +0 -0
@@ -2,6 +2,27 @@
2
2
 
3
3
  All notable changes to `uipath_langchain_client` will be documented in this file.
4
4
 
5
+ ## [1.0.7] - 2026-02-04
6
+
7
+ ### Refactor
8
+ - Refactor factory function to include byom models
9
+
10
+ ## [1.0.6] - 2026-02-03
11
+
12
+ ### Refactor
13
+ - Updated documentation to include the new aliases for settings
14
+ - New alias for settings and request timeout in BaseLLMClient
15
+
16
+ ## [1.0.5] - 2026-02-03
17
+
18
+ ### Bug Fix
19
+ - Fixed retry logic on all clients
20
+
21
+ ## [1.0.4] - 2026-02-03
22
+
23
+ ### Bug Fix
24
+ Fix some timeout issues on langchain_openai from llmgw.
25
+
5
26
  ## [1.0.3] - 2026-02-02
6
27
 
7
28
  ### Bug Fix
@@ -1,10 +1,10 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: uipath-langchain-client
3
- Version: 1.0.3
3
+ Version: 1.0.7
4
4
  Summary: LangChain-compatible chat models and embeddings for UiPath's LLM services
5
5
  Requires-Python: >=3.11
6
6
  Requires-Dist: langchain>=1.2.7
7
- Requires-Dist: uipath-llm-client>=1.0.3
7
+ Requires-Dist: uipath-llm-client>=1.0.6
8
8
  Provides-Extra: all
9
9
  Requires-Dist: anthropic[bedrock,vertex]>=0.77.0; extra == 'all'
10
10
  Requires-Dist: langchain-anthropic>=1.3.1; extra == 'all'
@@ -71,7 +71,7 @@ print(response.content)
71
71
 
72
72
  # Embeddings model
73
73
  embeddings = get_embedding_model(
74
- model="text-embedding-3-large",
74
+ model_name="text-embedding-3-large",
75
75
  client_settings=settings,
76
76
  )
77
77
  vectors = embeddings.embed_documents(["Hello world"])
@@ -92,20 +92,20 @@ from uipath_langchain_client.settings import get_default_client_settings
92
92
  settings = get_default_client_settings()
93
93
 
94
94
  # OpenAI/Azure
95
- openai_chat = UiPathAzureChatOpenAI(model="gpt-4o-2024-11-20", client_settings=settings)
95
+ openai_chat = UiPathAzureChatOpenAI(model="gpt-4o-2024-11-20", settings=settings)
96
96
 
97
97
  # Google Gemini
98
- gemini_chat = UiPathChatGoogleGenerativeAI(model="gemini-2.5-flash", client_settings=settings)
98
+ gemini_chat = UiPathChatGoogleGenerativeAI(model="gemini-2.5-flash", settings=settings)
99
99
 
100
100
  # Anthropic Claude (via AWS Bedrock)
101
101
  claude_chat = UiPathChatAnthropic(
102
102
  model="anthropic.claude-sonnet-4-5-20250929-v1:0",
103
- client_settings=settings,
103
+ settings=settings,
104
104
  vendor_type="awsbedrock",
105
105
  )
106
106
 
107
107
  # Normalized (provider-agnostic)
108
- normalized_chat = UiPathNormalizedChatModel(model="gpt-4o-2024-11-20", client_settings=settings)
108
+ normalized_chat = UiPathNormalizedChatModel(model="gpt-4o-2024-11-20", settings=settings)
109
109
  ```
110
110
 
111
111
  ## Available Client Types
@@ -41,7 +41,7 @@ print(response.content)
41
41
 
42
42
  # Embeddings model
43
43
  embeddings = get_embedding_model(
44
- model="text-embedding-3-large",
44
+ model_name="text-embedding-3-large",
45
45
  client_settings=settings,
46
46
  )
47
47
  vectors = embeddings.embed_documents(["Hello world"])
@@ -62,20 +62,20 @@ from uipath_langchain_client.settings import get_default_client_settings
62
62
  settings = get_default_client_settings()
63
63
 
64
64
  # OpenAI/Azure
65
- openai_chat = UiPathAzureChatOpenAI(model="gpt-4o-2024-11-20", client_settings=settings)
65
+ openai_chat = UiPathAzureChatOpenAI(model="gpt-4o-2024-11-20", settings=settings)
66
66
 
67
67
  # Google Gemini
68
- gemini_chat = UiPathChatGoogleGenerativeAI(model="gemini-2.5-flash", client_settings=settings)
68
+ gemini_chat = UiPathChatGoogleGenerativeAI(model="gemini-2.5-flash", settings=settings)
69
69
 
70
70
  # Anthropic Claude (via AWS Bedrock)
71
71
  claude_chat = UiPathChatAnthropic(
72
72
  model="anthropic.claude-sonnet-4-5-20250929-v1:0",
73
- client_settings=settings,
73
+ settings=settings,
74
74
  vendor_type="awsbedrock",
75
75
  )
76
76
 
77
77
  # Normalized (provider-agnostic)
78
- normalized_chat = UiPathNormalizedChatModel(model="gpt-4o-2024-11-20", client_settings=settings)
78
+ normalized_chat = UiPathNormalizedChatModel(model="gpt-4o-2024-11-20", settings=settings)
79
79
  ```
80
80
 
81
81
  ## Available Client Types
@@ -167,7 +167,7 @@ def demo_embeddings():
167
167
 
168
168
  settings = get_default_client_settings()
169
169
  embeddings = get_embedding_model(
170
- model="text-embedding-3-large",
170
+ model_name="text-embedding-3-large",
171
171
  client_settings=settings,
172
172
  )
173
173
 
@@ -241,7 +241,7 @@ def demo_direct_client_usage():
241
241
  # Use the Azure OpenAI client directly for more control
242
242
  chat_model = UiPathAzureChatOpenAI(
243
243
  model="gpt-4o-2024-11-20",
244
- client_settings=settings,
244
+ settings=settings,
245
245
  temperature=0.7,
246
246
  )
247
247
 
@@ -6,7 +6,7 @@ readme = "README.md"
6
6
  requires-python = ">=3.11"
7
7
  dependencies = [
8
8
  "langchain>=1.2.7",
9
- "uipath-llm-client>=1.0.3",
9
+ "uipath-llm-client>=1.0.6",
10
10
  ]
11
11
 
12
12
  [project.optional-dependencies]
@@ -15,11 +15,11 @@ Quick Start:
15
15
  >>> settings = get_default_client_settings()
16
16
  >>>
17
17
  >>> # Chat model with auto-detected vendor
18
- >>> chat = get_chat_model("gpt-4o-2024-11-20", settings)
18
+ >>> chat = get_chat_model(model_name="gpt-4o-2024-11-20", client_settings=settings)
19
19
  >>> response = chat.invoke("Hello!")
20
20
  >>>
21
21
  >>> # Embeddings model
22
- >>> embeddings = get_embedding_model("text-embedding-3-large", settings)
22
+ >>> embeddings = get_embedding_model(model_name="text-embedding-3-large", client_settings=settings)
23
23
  >>> vectors = embeddings.embed_documents(["Hello world"])
24
24
 
25
25
  Settings:
@@ -1,3 +1,3 @@
1
1
  __title__ = "UiPath LangChain Client"
2
2
  __description__ = "A Python client for interacting with UiPath's LLM services via LangChain."
3
- __version__ = "1.0.3"
3
+ __version__ = "1.0.7"
@@ -29,7 +29,7 @@ from functools import cached_property
29
29
  from typing import Any, Literal
30
30
 
31
31
  from httpx import URL, Response
32
- from pydantic import BaseModel, ConfigDict, Field
32
+ from pydantic import AliasChoices, BaseModel, ConfigDict, Field
33
33
 
34
34
  from uipath_langchain_client.settings import (
35
35
  UiPathAPIConfig,
@@ -85,23 +85,26 @@ class UiPathBaseLLMClient(BaseModel):
85
85
  description="Settings for the UiPath API",
86
86
  )
87
87
  client_settings: UiPathBaseSettings = Field(
88
+ alias="settings",
88
89
  default_factory=get_default_client_settings,
89
90
  description="Settings for the UiPath client (defaults based on UIPATH_LLM_BACKEND env var)",
90
91
  )
92
+
91
93
  default_headers: Mapping[str, str] | None = Field(
92
94
  default={
93
- "X-UiPath-LLMGateway-TimeoutSeconds": "30", # server side timeout, default is 10, maximum is 300
95
+ "X-UiPath-LLMGateway-TimeoutSeconds": "300", # server side timeout, default is 10, maximum is 300
94
96
  "X-UiPath-LLMGateway-AllowFull4xxResponse": "true", # allow full 4xx responses (default is false)
95
97
  },
96
98
  description="Default request headers to include in requests",
97
99
  )
98
-
99
100
  request_timeout: int | None = Field(
101
+ alias="timeout",
102
+ validation_alias=AliasChoices("timeout", "request_timeout", "default_request_timeout"),
100
103
  default=None,
101
104
  description="Client-side request timeout in seconds",
102
105
  )
103
106
  max_retries: int = Field(
104
- default=1,
107
+ default=0,
105
108
  description="Maximum number of retries for failed requests",
106
109
  )
107
110
  retry_config: RetryConfig | None = Field(
@@ -62,7 +62,7 @@ class UiPathChatAnthropic(UiPathBaseLLMClient, ChatAnthropic):
62
62
  api_key="PLACEHOLDER",
63
63
  base_url=str(self.uipath_sync_client.base_url),
64
64
  default_headers=dict(self.uipath_sync_client.headers),
65
- max_retries=1, # handled by the UiPathBaseLLMClient
65
+ max_retries=0, # handled by the UiPathBaseLLMClient
66
66
  timeout=None, # handled by the UiPathBaseLLMClient
67
67
  http_client=self.uipath_sync_client,
68
68
  )
@@ -74,7 +74,7 @@ class UiPathChatAnthropic(UiPathBaseLLMClient, ChatAnthropic):
74
74
  base_url=str(self.uipath_sync_client.base_url),
75
75
  default_headers=dict(self.uipath_sync_client.headers),
76
76
  timeout=None, # handled by the UiPathBaseLLMClient
77
- max_retries=1, # handled by the UiPathBaseLLMClient
77
+ max_retries=0, # handled by the UiPathBaseLLMClient
78
78
  http_client=self.uipath_sync_client,
79
79
  )
80
80
  case "awsbedrock":
@@ -85,7 +85,7 @@ class UiPathChatAnthropic(UiPathBaseLLMClient, ChatAnthropic):
85
85
  base_url=str(self.uipath_sync_client.base_url),
86
86
  default_headers=dict(self.uipath_sync_client.headers),
87
87
  timeout=None, # handled by the UiPathBaseLLMClient
88
- max_retries=1, # handled by the UiPathBaseLLMClient
88
+ max_retries=0, # handled by the UiPathBaseLLMClient
89
89
  http_client=self.uipath_sync_client,
90
90
  )
91
91
  case "anthropic":
@@ -94,7 +94,7 @@ class UiPathChatAnthropic(UiPathBaseLLMClient, ChatAnthropic):
94
94
  base_url=str(self.uipath_sync_client.base_url),
95
95
  default_headers=dict(self.uipath_sync_client.headers),
96
96
  timeout=None, # handled by the UiPathBaseLLMClient
97
- max_retries=1, # handled by the UiPathBaseLLMClient
97
+ max_retries=0, # handled by the UiPathBaseLLMClient
98
98
  http_client=self.uipath_sync_client,
99
99
  )
100
100
 
@@ -108,7 +108,7 @@ class UiPathChatAnthropic(UiPathBaseLLMClient, ChatAnthropic):
108
108
  api_key="PLACEHOLDER",
109
109
  base_url=str(self.uipath_async_client.base_url),
110
110
  default_headers=dict(self.uipath_async_client.headers),
111
- max_retries=1, # handled by the UiPathBaseLLMClient
111
+ max_retries=0, # handled by the UiPathBaseLLMClient
112
112
  timeout=None, # handled by the UiPathBaseLLMClient
113
113
  http_client=self.uipath_async_client,
114
114
  )
@@ -120,7 +120,7 @@ class UiPathChatAnthropic(UiPathBaseLLMClient, ChatAnthropic):
120
120
  base_url=str(self.uipath_async_client.base_url),
121
121
  default_headers=dict(self.uipath_async_client.headers),
122
122
  timeout=None, # handled by the UiPathBaseLLMClient
123
- max_retries=1, # handled by the UiPathBaseLLMClient
123
+ max_retries=0, # handled by the UiPathBaseLLMClient
124
124
  http_client=self.uipath_async_client,
125
125
  )
126
126
  case "awsbedrock":
@@ -131,7 +131,7 @@ class UiPathChatAnthropic(UiPathBaseLLMClient, ChatAnthropic):
131
131
  base_url=str(self.uipath_async_client.base_url),
132
132
  default_headers=dict(self.uipath_async_client.headers),
133
133
  timeout=None, # handled by the UiPathBaseLLMClient
134
- max_retries=1, # handled by the UiPathBaseLLMClient
134
+ max_retries=0, # handled by the UiPathBaseLLMClient
135
135
  http_client=self.uipath_async_client,
136
136
  )
137
137
  case _:
@@ -140,7 +140,7 @@ class UiPathChatAnthropic(UiPathBaseLLMClient, ChatAnthropic):
140
140
  base_url=str(self.uipath_async_client.base_url),
141
141
  default_headers=dict(self.uipath_async_client.headers),
142
142
  timeout=None, # handled by the UiPathBaseLLMClient
143
- max_retries=1, # handled by the UiPathBaseLLMClient
143
+ max_retries=0, # handled by the UiPathBaseLLMClient
144
144
  http_client=self.uipath_async_client,
145
145
  )
146
146
 
@@ -59,5 +59,5 @@ class UiPathChatBedrock(UiPathBaseLLMClient, ChatBedrock):
59
59
  def _as_converse(self) -> UiPathChatBedrockConverse:
60
60
  return UiPathChatBedrockConverse(
61
61
  model=self.model_name,
62
- client_settings=self.client_settings,
62
+ settings=self.client_settings,
63
63
  )
@@ -18,7 +18,7 @@ Example:
18
18
  >>> settings = get_default_client_settings()
19
19
  >>> chat = UiPathNormalizedChatModel(
20
20
  ... model="gpt-4o-2024-11-20",
21
- ... client_settings=settings,
21
+ ... settings=settings,
22
22
  ... )
23
23
  >>> response = chat.invoke("Hello!")
24
24
  """
@@ -88,7 +88,7 @@ class UiPathNormalizedChatModel(UiPathBaseLLMClient, BaseChatModel):
88
88
  Example:
89
89
  >>> chat = UiPathNormalizedChatModel(
90
90
  ... model="gpt-4o-2024-11-20",
91
- ... client_settings=settings,
91
+ ... settings=settings,
92
92
  ... temperature=0.7,
93
93
  ... max_tokens=1000,
94
94
  ... )
@@ -43,13 +43,13 @@ class UiPathChatOpenAI(UiPathBaseLLMClient, ChatOpenAI): # type: ignore[overrid
43
43
  self.root_client = OpenAI(
44
44
  api_key="PLACEHOLDER",
45
45
  timeout=None, # handled by the UiPath client
46
- max_retries=1, # handled by the UiPath client
46
+ max_retries=0, # handled by the UiPath client
47
47
  http_client=self.uipath_sync_client,
48
48
  )
49
49
  self.root_async_client = AsyncOpenAI(
50
50
  api_key="PLACEHOLDER",
51
51
  timeout=None, # handled by the UiPath client
52
- max_retries=1, # handled by the UiPath client
52
+ max_retries=0, # handled by the UiPath client
53
53
  http_client=self.uipath_async_client,
54
54
  )
55
55
  self.client = self.root_client.chat.completions
@@ -86,7 +86,7 @@ class UiPathAzureChatOpenAI(UiPathBaseLLMClient, AzureChatOpenAI): # type: igno
86
86
  api_version="PLACEHOLDER",
87
87
  api_key="PLACEHOLDER",
88
88
  timeout=None, # handled by the UiPath client
89
- max_retries=1, # handled by the UiPath client
89
+ max_retries=0, # handled by the UiPath client
90
90
  http_client=self.uipath_sync_client,
91
91
  )
92
92
  self.root_async_client = AsyncAzureOpenAI(
@@ -94,7 +94,7 @@ class UiPathAzureChatOpenAI(UiPathBaseLLMClient, AzureChatOpenAI): # type: igno
94
94
  api_version="PLACEHOLDER",
95
95
  api_key="PLACEHOLDER",
96
96
  timeout=None, # handled by the UiPath client
97
- max_retries=1, # handled by the UiPath client
97
+ max_retries=0, # handled by the UiPath client
98
98
  http_client=self.uipath_async_client,
99
99
  )
100
100
  self.client = self.root_client.chat.completions
@@ -36,13 +36,13 @@ class UiPathOpenAIEmbeddings(UiPathBaseLLMClient, OpenAIEmbeddings):
36
36
  self.client = OpenAI(
37
37
  api_key="PLACEHOLDER",
38
38
  timeout=None, # handled by the UiPath client
39
- max_retries=1, # handled by the UiPath client
39
+ max_retries=0, # handled by the UiPath client
40
40
  http_client=self.uipath_sync_client,
41
41
  ).embeddings
42
42
  self.async_client = AsyncOpenAI(
43
43
  api_key="PLACEHOLDER",
44
44
  timeout=None, # handled by the UiPath client
45
- max_retries=1, # handled by the UiPath client
45
+ max_retries=0, # handled by the UiPath client
46
46
  http_client=self.uipath_async_client,
47
47
  ).embeddings
48
48
  return self
@@ -70,7 +70,7 @@ class UiPathAzureOpenAIEmbeddings(UiPathBaseLLMClient, AzureOpenAIEmbeddings):
70
70
  api_version="PLACEHOLDER",
71
71
  api_key="PLACEHOLDER",
72
72
  timeout=None, # handled by the UiPath client
73
- max_retries=1, # handled by the UiPath client
73
+ max_retries=0, # handled by the UiPath client
74
74
  http_client=self.uipath_sync_client,
75
75
  ).embeddings
76
76
  self.async_client = AsyncAzureOpenAI(
@@ -78,7 +78,7 @@ class UiPathAzureOpenAIEmbeddings(UiPathBaseLLMClient, AzureOpenAIEmbeddings):
78
78
  api_version="PLACEHOLDER",
79
79
  api_key="PLACEHOLDER",
80
80
  timeout=None, # handled by the UiPath client
81
- max_retries=1, # handled by the UiPath client
81
+ max_retries=0, # handled by the UiPath client
82
82
  http_client=self.uipath_async_client,
83
83
  ).embeddings
84
84
  return self
@@ -36,7 +36,7 @@ class UiPathChatAnthropicVertex(UiPathBaseLLMClient, ChatAnthropicVertex): # ty
36
36
  base_url=str(self.uipath_sync_client.base_url),
37
37
  default_headers=self.uipath_sync_client.headers,
38
38
  timeout=None, # handled by the UiPath client
39
- max_retries=1, # handled by the UiPath client
39
+ max_retries=0, # handled by the UiPath client
40
40
  http_client=self.uipath_sync_client,
41
41
  )
42
42
  self.async_client = AsyncAnthropicVertex(
@@ -46,7 +46,7 @@ class UiPathChatAnthropicVertex(UiPathBaseLLMClient, ChatAnthropicVertex): # ty
46
46
  base_url=str(self.uipath_async_client.base_url),
47
47
  default_headers=self.uipath_async_client.headers,
48
48
  timeout=None, # handled by the UiPath client
49
- max_retries=1, # handled by the UiPath client
49
+ max_retries=0, # handled by the UiPath client
50
50
  http_client=self.uipath_async_client,
51
51
  )
52
52
  return self
@@ -16,8 +16,8 @@ Example:
16
16
  >>> settings = get_default_client_settings()
17
17
  >>>
18
18
  >>> # Auto-detect vendor from model name
19
- >>> chat = get_chat_model("gpt-4o-2024-11-20", settings)
20
- >>> embeddings = get_embedding_model("text-embedding-3-large", settings)
19
+ >>> chat = get_chat_model(model_name="gpt-4o-2024-11-20", client_settings=settings)
20
+ >>> embeddings = get_embedding_model(model_name="text-embedding-3-large", client_settings=settings)
21
21
  """
22
22
 
23
23
  from typing import Any, Literal
@@ -47,7 +47,14 @@ def _get_model_info(
47
47
  ]
48
48
 
49
49
  if not byo_connection_id and len(matching_models) > 1:
50
- matching_models = [m for m in matching_models if m.get("byomDetails") is None]
50
+ matching_models = [
51
+ m
52
+ for m in matching_models
53
+ if (
54
+ (m.get("modelSubscriptionType", "") == "UiPathOwned")
55
+ or (m.get("byomDetails") is None)
56
+ )
57
+ ]
51
58
 
52
59
  if not matching_models:
53
60
  raise ValueError(
@@ -87,67 +94,115 @@ def get_chat_model(
87
94
  UiPathNormalizedChatModel,
88
95
  )
89
96
 
90
- return UiPathNormalizedChatModel(model=model_name, **model_kwargs)
97
+ return UiPathNormalizedChatModel(
98
+ model=model_name,
99
+ settings=client_settings,
100
+ byo_connection_id=byo_connection_id,
101
+ **model_kwargs,
102
+ )
91
103
 
92
104
  vendor_type = model_info["vendor"].lower()
105
+ is_uipath_owned = model_info.get("modelSubscriptionType") == "UiPathOwned"
93
106
  match vendor_type:
94
107
  case "openai":
95
- if "gpt" in model_name:
108
+ if is_uipath_owned:
96
109
  from uipath_langchain_client.clients.openai.chat_models import (
97
110
  UiPathAzureChatOpenAI,
98
111
  )
99
112
 
100
113
  return UiPathAzureChatOpenAI(
101
114
  model=model_name,
102
- client_settings=client_settings,
115
+ settings=client_settings,
103
116
  **model_kwargs,
104
117
  )
105
118
  else:
106
- raise ValueError(f"Invalid model name: {model_name} for vendor: {vendor_type}")
107
- case "vertexai":
108
- if "gemini" in model_name:
109
- from uipath_langchain_client.clients.google.chat_models import (
110
- UiPathChatGoogleGenerativeAI,
119
+ from uipath_langchain_client.clients.openai.chat_models import (
120
+ UiPathChatOpenAI,
111
121
  )
112
122
 
113
- return UiPathChatGoogleGenerativeAI(
123
+ return UiPathChatOpenAI(
114
124
  model=model_name,
115
- client_settings=client_settings,
125
+ settings=client_settings,
126
+ byo_connection_id=byo_connection_id,
116
127
  **model_kwargs,
117
128
  )
118
- elif "claude" in model_name:
119
- from uipath_langchain_client.clients.anthropic.chat_models import (
120
- UiPathChatAnthropic,
129
+ case "vertexai":
130
+ if is_uipath_owned:
131
+ if "claude" in model_name:
132
+ from uipath_langchain_client.clients.vertexai.chat_models import (
133
+ UiPathChatAnthropicVertex,
134
+ )
135
+
136
+ return UiPathChatAnthropicVertex(
137
+ model=model_name,
138
+ settings=client_settings,
139
+ **model_kwargs,
140
+ )
141
+ elif "gemini" in model_name:
142
+ from uipath_langchain_client.clients.google.chat_models import (
143
+ UiPathChatGoogleGenerativeAI,
144
+ )
145
+
146
+ return UiPathChatGoogleGenerativeAI(
147
+ model=model_name,
148
+ settings=client_settings,
149
+ **model_kwargs,
150
+ )
151
+ else:
152
+ raise ValueError(
153
+ f"We don't have a client that currently supports this model: {model_name} on vendor: {vendor_type}"
154
+ )
155
+ else:
156
+ from uipath_langchain_client.clients.google.chat_models import (
157
+ UiPathChatGoogleGenerativeAI,
121
158
  )
122
159
 
123
- return UiPathChatAnthropic(
160
+ return UiPathChatGoogleGenerativeAI(
124
161
  model=model_name,
125
- client_settings=client_settings,
126
- vendor_type="vertexai",
162
+ settings=client_settings,
163
+ byo_connection_id=byo_connection_id,
127
164
  **model_kwargs,
128
165
  )
129
- else:
130
- raise ValueError(f"Invalid model name: {model_name} for vendor: {vendor_type}")
131
166
  case "awsbedrock":
132
- if "claude" in model_name:
133
- from uipath_langchain_client.clients.anthropic.chat_models import (
134
- UiPathChatAnthropic,
167
+ if is_uipath_owned:
168
+ if "claude" in model_name:
169
+ from uipath_langchain_client.clients.anthropic.chat_models import (
170
+ UiPathChatAnthropic,
171
+ )
172
+
173
+ return UiPathChatAnthropic(
174
+ model=model_name,
175
+ settings=client_settings,
176
+ **model_kwargs,
177
+ )
178
+ else:
179
+ from uipath_langchain_client.clients.bedrock.chat_models import (
180
+ UiPathChatBedrock,
181
+ )
182
+
183
+ return UiPathChatBedrock(
184
+ model=model_name,
185
+ settings=client_settings,
186
+ **model_kwargs,
187
+ )
188
+ else:
189
+ from uipath_langchain_client.clients.bedrock.chat_models import (
190
+ UiPathChatBedrockConverse,
135
191
  )
136
192
 
137
- return UiPathChatAnthropic(
193
+ return UiPathChatBedrockConverse(
138
194
  model=model_name,
139
- client_settings=client_settings,
140
- vendor_type="awsbedrock",
195
+ settings=client_settings,
141
196
  **model_kwargs,
142
197
  )
143
- else:
144
- raise ValueError(f"Invalid model name: {model_name} for vendor: {vendor_type}")
145
198
  case _:
146
- raise ValueError(f"Invalid UiPath vendor type: {vendor_type}")
199
+ raise ValueError(
200
+ f"Invalid vendor type: {vendor_type}, we don't currently have clients that support that api type"
201
+ )
147
202
 
148
203
 
149
204
  def get_embedding_model(
150
- model: str,
205
+ model_name: str,
151
206
  byo_connection_id: str | None = None,
152
207
  client_settings: UiPathBaseSettings | None = None,
153
208
  client_type: Literal["passthrough", "normalized"] = "passthrough",
@@ -172,11 +227,11 @@ def get_embedding_model(
172
227
 
173
228
  Example:
174
229
  >>> settings = get_default_client_settings()
175
- >>> embeddings = get_embedding_model("text-embedding-3-large", settings)
230
+ >>> embeddings = get_embedding_model(model_name="text-embedding-3-large", client_settings=settings)
176
231
  >>> vectors = embeddings.embed_documents(["Hello world"])
177
232
  """
178
233
  client_settings = client_settings or get_default_client_settings()
179
- model_info = _get_model_info(model, client_settings, byo_connection_id)
234
+ model_info = _get_model_info(model_name, client_settings, byo_connection_id)
180
235
 
181
236
  if client_type == "normalized":
182
237
  from uipath_langchain_client.clients.normalized.embeddings import (
@@ -184,26 +239,45 @@ def get_embedding_model(
184
239
  )
185
240
 
186
241
  return UiPathNormalizedEmbeddings(
187
- model=model, client_settings=client_settings, **model_kwargs
242
+ model=model_name,
243
+ settings=client_settings,
244
+ byo_connection_id=byo_connection_id,
245
+ **model_kwargs,
188
246
  )
189
247
 
190
248
  vendor_type = model_info["vendor"].lower()
249
+ is_uipath_owned = model_info.get("modelSubscriptionType") == "UiPathOwned"
191
250
  match vendor_type:
192
251
  case "openai":
193
- from uipath_langchain_client.clients.openai.embeddings import (
194
- UiPathAzureOpenAIEmbeddings,
195
- )
252
+ if is_uipath_owned:
253
+ from uipath_langchain_client.clients.openai.embeddings import (
254
+ UiPathAzureOpenAIEmbeddings,
255
+ )
196
256
 
197
- return UiPathAzureOpenAIEmbeddings(
198
- model=model, client_settings=client_settings, **model_kwargs
199
- )
257
+ return UiPathAzureOpenAIEmbeddings(
258
+ model=model_name, settings=client_settings, **model_kwargs
259
+ )
260
+ else:
261
+ from uipath_langchain_client.clients.openai.embeddings import (
262
+ UiPathOpenAIEmbeddings,
263
+ )
264
+
265
+ return UiPathOpenAIEmbeddings(
266
+ model=model_name,
267
+ settings=client_settings,
268
+ byo_connection_id=byo_connection_id,
269
+ **model_kwargs,
270
+ )
200
271
  case "vertexai":
201
272
  from uipath_langchain_client.clients.google.embeddings import (
202
273
  UiPathGoogleGenerativeAIEmbeddings,
203
274
  )
204
275
 
205
276
  return UiPathGoogleGenerativeAIEmbeddings(
206
- model=model, client_settings=client_settings, **model_kwargs
277
+ model=model_name,
278
+ settings=client_settings,
279
+ byo_connection_id=byo_connection_id,
280
+ **model_kwargs,
207
281
  )
208
282
  case "awsbedrock":
209
283
  from uipath_langchain_client.clients.bedrock.embeddings import (
@@ -211,7 +285,12 @@ def get_embedding_model(
211
285
  )
212
286
 
213
287
  return UiPathBedrockEmbeddings(
214
- model=model, client_settings=client_settings, **model_kwargs
288
+ model=model_name,
289
+ settings=client_settings,
290
+ byo_connection_id=byo_connection_id,
291
+ **model_kwargs,
215
292
  )
216
293
  case _:
217
- raise ValueError(f"Invalid UiPath Embeddings provider: {vendor_type}")
294
+ raise ValueError(
295
+ f"We don't currently have clients that support this provider: {vendor_type}"
296
+ )