uipath-langchain-client 1.0.0 (tar.gz)

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (31)
  1. uipath_langchain_client-1.0.0/.gitignore +211 -0
  2. uipath_langchain_client-1.0.0/CHANGELOG.md +90 -0
  3. uipath_langchain_client-1.0.0/PKG-INFO +276 -0
  4. uipath_langchain_client-1.0.0/README.md +248 -0
  5. uipath_langchain_client-1.0.0/demo.py +278 -0
  6. uipath_langchain_client-1.0.0/pyproject.toml +43 -0
  7. uipath_langchain_client-1.0.0/src/uipath_langchain_client/__init__.py +50 -0
  8. uipath_langchain_client-1.0.0/src/uipath_langchain_client/__version__.py +3 -0
  9. uipath_langchain_client-1.0.0/src/uipath_langchain_client/base_client.py +277 -0
  10. uipath_langchain_client-1.0.0/src/uipath_langchain_client/clients/anthropic/__init__.py +3 -0
  11. uipath_langchain_client-1.0.0/src/uipath_langchain_client/clients/anthropic/chat_models.py +157 -0
  12. uipath_langchain_client-1.0.0/src/uipath_langchain_client/clients/azure/__init__.py +4 -0
  13. uipath_langchain_client-1.0.0/src/uipath_langchain_client/clients/azure/chat_models.py +46 -0
  14. uipath_langchain_client-1.0.0/src/uipath_langchain_client/clients/azure/embeddings.py +46 -0
  15. uipath_langchain_client-1.0.0/src/uipath_langchain_client/clients/bedrock/__init__.py +7 -0
  16. uipath_langchain_client-1.0.0/src/uipath_langchain_client/clients/bedrock/chat_models.py +63 -0
  17. uipath_langchain_client-1.0.0/src/uipath_langchain_client/clients/bedrock/embeddings.py +33 -0
  18. uipath_langchain_client-1.0.0/src/uipath_langchain_client/clients/bedrock/utils.py +90 -0
  19. uipath_langchain_client-1.0.0/src/uipath_langchain_client/clients/google/__init__.py +4 -0
  20. uipath_langchain_client-1.0.0/src/uipath_langchain_client/clients/google/chat_models.py +203 -0
  21. uipath_langchain_client-1.0.0/src/uipath_langchain_client/clients/google/embeddings.py +45 -0
  22. uipath_langchain_client-1.0.0/src/uipath_langchain_client/clients/normalized/__init__.py +4 -0
  23. uipath_langchain_client-1.0.0/src/uipath_langchain_client/clients/normalized/chat_models.py +419 -0
  24. uipath_langchain_client-1.0.0/src/uipath_langchain_client/clients/normalized/embeddings.py +31 -0
  25. uipath_langchain_client-1.0.0/src/uipath_langchain_client/clients/openai/__init__.py +15 -0
  26. uipath_langchain_client-1.0.0/src/uipath_langchain_client/clients/openai/chat_models.py +102 -0
  27. uipath_langchain_client-1.0.0/src/uipath_langchain_client/clients/openai/embeddings.py +82 -0
  28. uipath_langchain_client-1.0.0/src/uipath_langchain_client/clients/vertexai/__init__.py +3 -0
  29. uipath_langchain_client-1.0.0/src/uipath_langchain_client/clients/vertexai/chat_models.py +48 -0
  30. uipath_langchain_client-1.0.0/src/uipath_langchain_client/factory.py +217 -0
  31. uipath_langchain_client-1.0.0/src/uipath_langchain_client/settings.py +32 -0
uipath_langchain_client-1.0.0/.gitignore
@@ -0,0 +1,211 @@
+ # Byte-compiled / optimized / DLL files
+ __pycache__/
+ *.py[codz]
+ *$py.class
+
+ # C extensions
+ *.so
+
+ # Distribution / packaging
+ .Python
+ build/
+ develop-eggs/
+ dist/
+ downloads/
+ eggs/
+ .eggs/
+ lib/
+ lib64/
+ parts/
+ sdist/
+ var/
+ wheels/
+ share/python-wheels/
+ *.egg-info/
+ .installed.cfg
+ *.egg
+ MANIFEST
+
+ # PyInstaller
+ # Usually these files are written by a python script from a template
+ # before PyInstaller builds the exe, so as to inject date/other infos into it.
+ *.manifest
+ *.spec
+
+ # Installer logs
+ pip-log.txt
+ pip-delete-this-directory.txt
+
+ # Unit test / coverage reports
+ htmlcov/
+ .tox/
+ .nox/
+ .coverage
+ .coverage.*
+ .cache
+ nosetests.xml
+ coverage.xml
+ *.cover
+ *.py.cover
+ .hypothesis/
+ .pytest_cache/
+ cover/
+
+ # Translations
+ *.mo
+ *.pot
+
+ # Django stuff:
+ *.log
+ local_settings.py
+ db.sqlite3
+ db.sqlite3-journal
+
+ # Flask stuff:
+ instance/
+ .webassets-cache
+
+ # Scrapy stuff:
+ .scrapy
+
+ # Sphinx documentation
+ docs/_build/
+
+ # PyBuilder
+ .pybuilder/
+ target/
+
+ # Jupyter Notebook
+ .ipynb_checkpoints
+
+ # IPython
+ profile_default/
+ ipython_config.py
+
+ # pyenv
+ # For a library or package, you might want to ignore these files since the code is
+ # intended to run in multiple environments; otherwise, check them in:
+ # .python-version
+
+ # pipenv
+ # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
+ # However, in case of collaboration, if having platform-specific dependencies or dependencies
+ # having no cross-platform support, pipenv may install dependencies that don't work, or not
+ # install all needed dependencies.
+ #Pipfile.lock
+
+ # UV
+ # Similar to Pipfile.lock, it is generally recommended to include uv.lock in version control.
+ # This is especially recommended for binary packages to ensure reproducibility, and is more
+ # commonly ignored for libraries.
+ #uv.lock
+
+ # poetry
+ # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
+ # This is especially recommended for binary packages to ensure reproducibility, and is more
+ # commonly ignored for libraries.
+ # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
+ #poetry.lock
+ #poetry.toml
+
+ # pdm
+ # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
+ # pdm recommends including project-wide configuration in pdm.toml, but excluding .pdm-python.
+ # https://pdm-project.org/en/latest/usage/project/#working-with-version-control
+ #pdm.lock
+ #pdm.toml
+ .pdm-python
+ .pdm-build/
+
+ # pixi
+ # Similar to Pipfile.lock, it is generally recommended to include pixi.lock in version control.
+ #pixi.lock
+ # Pixi creates a virtual environment in the .pixi directory, just like venv module creates one
+ # in the .venv directory. It is recommended not to include this directory in version control.
+ .pixi
+
+ # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
+ __pypackages__/
+
+ # Celery stuff
+ celerybeat-schedule
+ celerybeat.pid
+
+ # SageMath parsed files
+ *.sage.py
+
+ # Environments
+ .env
+ .envrc
+ .venv
+ env/
+ venv/
+ ENV/
+ env.bak/
+ venv.bak/
+
+ # Spyder project settings
+ .spyderproject
+ .spyproject
+
+ # Rope project settings
+ .ropeproject
+
+ # mkdocs documentation
+ /site
+
+ # mypy
+ .mypy_cache/
+ .dmypy.json
+ dmypy.json
+
+ # Pyre type checker
+ .pyre/
+
+ # pytype static type analyzer
+ .pytype/
+
+ # Cython debug symbols
+ cython_debug/
+
+ # PyCharm
+ # JetBrains specific template is maintained in a separate JetBrains.gitignore that can
+ # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
+ # and can be added to the global gitignore or merged into this file. For a more nuclear
+ # option (not recommended) you can uncomment the following to ignore the entire idea folder.
+ #.idea/
+
+ # Abstra
+ # Abstra is an AI-powered process automation framework.
+ # Ignore directories containing user credentials, local state, and settings.
+ # Learn more at https://abstra.io/docs
+ .abstra/
+
+ # Visual Studio Code
+ # Visual Studio Code specific template is maintained in a separate VisualStudioCode.gitignore
+ # that can be found at https://github.com/github/gitignore/blob/main/Global/VisualStudioCode.gitignore
+ # and can be added to the global gitignore or merged into this file. However, if you prefer,
+ # you could uncomment the following to ignore the entire vscode folder
+ # .vscode/
+
+ # Ruff stuff:
+ .ruff_cache/
+
+ # PyPI configuration file
+ .pypirc
+
+ # Cursor
+ # Cursor is an AI-powered code editor. `.cursorignore` specifies files/directories to
+ # exclude from AI features like autocomplete and code analysis. Recommended for sensitive data
+ # refer to https://docs.cursor.com/context/ignore-files
+ .cursorignore
+ .cursorindexingignore
+
+ # Marimo
+ marimo/_static/
+ marimo/_lsp/
+ __marimo__/
+
+ .vscode/
+ .DS_Store
+ .uipath/
uipath_langchain_client-1.0.0/CHANGELOG.md
@@ -0,0 +1,90 @@
+ # UiPath LangChain Client Changelog
+
+ All notable changes to `uipath_langchain_client` will be documented in this file.
+
+ ## [1.0.0] - 2025-01-30
+
+ ### Official Release
+ - First stable release of the UiPath LangChain Client
+ - API considered stable; semantic versioning will be followed from this point forward
+
+ ### Highlights
+ - Production-ready LangChain integrations for all major LLM providers
+ - Factory functions for automatic vendor detection and model instantiation
+ - Full compatibility with LangChain agents, tools, and chains
+ - Comprehensive support for chat completions, embeddings, and streaming
+ - Seamless integration with both AgentHub and LLMGateway backends
+
+ ## [0.3.x] - 2025-01-29
+
+ ### Release
+ - First public release of the UiPath LangChain Client
+ - Production-ready integration with LangChain ecosystem
+
+ ### Documentation
+ - Complete rewrite of README.md with installation, quick start, and API reference
+ - Added comprehensive usage examples for all supported providers
+ - Added module-level and class-level docstrings throughout the codebase
+
+ ### Features
+ - Factory functions (`get_chat_model`, `get_embedding_model`) for auto-detecting model vendors
+ - Normalized API support for provider-agnostic chat completions and embeddings
+ - Full compatibility with LangChain agents and tools
+
+ ### Supported Providers
+ - OpenAI
+ - Google
+ - Anthropic
+ - AWS Bedrock
+ - Vertex AI
+ - Azure AI
+
+ ## [0.2.x] - 2025-01-15
+
+ ### Architecture
+ - Extracted from monolithic package into dedicated LangChain integration package
+ - Now depends on `uipath_llm_client` core package for HTTP client and authentication
+ - Unified client architecture supporting both AgentHub and LLMGateway backends
+
+ ### Chat Model Classes
+ - `UiPathChatOpenAI` - OpenAI models via direct API
+ - `UiPathAzureChatOpenAI` - OpenAI models via Azure
+ - `UiPathChatGoogleGenerativeAI` - Google Gemini models
+ - `UiPathChatAnthropic` - Anthropic Claude models
+ - `UiPathChatAnthropicVertex` - Claude models via Google VertexAI
+ - `UiPathChatBedrock` - AWS Bedrock models
+ - `UiPathChatBedrockConverse` - AWS Bedrock Converse API
+ - `UiPathAzureAIChatCompletionsModel` - Azure AI models (non-OpenAI)
+ - `UiPathNormalizedChatModel` - Provider-agnostic normalized API
+
+ ### Embeddings Classes
+ - `UiPathOpenAIEmbeddings` - OpenAI embeddings via direct API
+ - `UiPathAzureOpenAIEmbeddings` - OpenAI embeddings via Azure
+ - `UiPathGoogleGenerativeAIEmbeddings` - Google embeddings
+ - `UiPathBedrockEmbeddings` - AWS Bedrock embeddings
+ - `UiPathAzureAIEmbeddingsModel` - Azure AI embeddings
+ - `UiPathNormalizedEmbeddings` - Provider-agnostic normalized API
+
+ ### Features
+ - Support for BYO (Bring Your Own) model connections
+
+ ### Breaking Changes
+ - Package renamed from internal module to `uipath_langchain_client`
+ - Import paths changed; update imports accordingly
+
+ ## [0.1.x] - 2024-12-20
+
+ ### Initial Development Release
+ - LangChain-compatible chat models wrapping UiPath LLM services
+ - Passthrough clients for:
+   - OpenAI
+   - Google Gemini
+   - Anthropic
+   - AWS Bedrock
+   - Vertex AI
+   - Azure AI
+ - Embeddings support for text-embedding models
+ - Streaming support (sync and async)
+ - Tool/function calling support
+ - Full compatibility with LangChain's `BaseChatModel` interface
+ - httpx-based HTTP handling for consistent behavior
uipath_langchain_client-1.0.0/PKG-INFO
@@ -0,0 +1,276 @@
+ Metadata-Version: 2.4
+ Name: uipath-langchain-client
+ Version: 1.0.0
+ Summary: LangChain-compatible chat models and embeddings for UiPath's LLM services
+ Requires-Python: >=3.11
+ Requires-Dist: langchain>=1.2.7
+ Requires-Dist: uipath-llm-client>=1.0.0
+ Provides-Extra: all
+ Requires-Dist: langchain-anthropic>=1.3.1; extra == 'all'
+ Requires-Dist: langchain-aws>=1.2.1; extra == 'all'
+ Requires-Dist: langchain-azure-ai>=1.0.0; extra == 'all'
+ Requires-Dist: langchain-google-genai>=4.2.0; extra == 'all'
+ Requires-Dist: langchain-google-vertexai>=3.2.1; extra == 'all'
+ Requires-Dist: langchain-openai>=1.1.7; extra == 'all'
+ Provides-Extra: anthropic
+ Requires-Dist: langchain-anthropic>=1.3.1; extra == 'anthropic'
+ Provides-Extra: aws
+ Requires-Dist: langchain-aws>=1.2.1; extra == 'aws'
+ Provides-Extra: azure
+ Requires-Dist: langchain-azure-ai>=1.0.0; extra == 'azure'
+ Provides-Extra: google
+ Requires-Dist: langchain-google-genai>=4.2.0; extra == 'google'
+ Provides-Extra: openai
+ Requires-Dist: langchain-openai>=1.1.7; extra == 'openai'
+ Provides-Extra: vertexai
+ Requires-Dist: langchain-google-vertexai>=3.2.1; extra == 'vertexai'
+ Description-Content-Type: text/markdown
+
+ # UiPath LangChain Client
+
+ LangChain-compatible chat models and embeddings for accessing LLMs through UiPath's infrastructure.
+
+ ## Installation
+
+ ```bash
+ # Base installation (normalized API only)
+ pip install uipath-langchain-client
+
+ # With specific provider extras for passthrough mode
+ pip install "uipath-langchain-client[openai]"    # OpenAI/Azure models
+ pip install "uipath-langchain-client[google]"    # Google Gemini models
+ pip install "uipath-langchain-client[anthropic]" # Anthropic Claude models
+ pip install "uipath-langchain-client[azure]"     # Azure AI models
+ pip install "uipath-langchain-client[aws]"       # AWS Bedrock models
+ pip install "uipath-langchain-client[vertexai]"  # Google VertexAI models
+ pip install "uipath-langchain-client[all]"       # All providers
+ ```
+
+ ## Quick Start
+
+ ### Using Factory Functions (Recommended)
+
+ The factory functions automatically detect the model vendor and return the appropriate client:
+
+ ```python
+ from uipath_langchain_client import get_chat_model, get_embedding_model
+ from uipath_langchain_client.settings import get_default_client_settings
+
+ # Get default settings (uses UIPATH_LLM_BACKEND env var or defaults to AgentHub)
+ settings = get_default_client_settings()
+
+ # Chat model - vendor auto-detected from model name
+ chat_model = get_chat_model(
+     model_name="gpt-4o-2024-11-20",
+     client_settings=settings,
+ )
+ response = chat_model.invoke("Hello, how are you?")
+ print(response.content)
+
+ # Embeddings model
+ embeddings = get_embedding_model(
+     model="text-embedding-3-large",
+     client_settings=settings,
+ )
+ vectors = embeddings.embed_documents(["Hello world"])
+ print(f"Embedding dimension: {len(vectors[0])}")
+ ```
+
+ ### Using Direct Client Classes
+
+ For more control, instantiate provider-specific classes directly:
+
+ ```python
+ from uipath_langchain_client.clients.openai.chat_models import UiPathAzureChatOpenAI
+ from uipath_langchain_client.clients.google.chat_models import UiPathChatGoogleGenerativeAI
+ from uipath_langchain_client.clients.anthropic.chat_models import UiPathChatAnthropic
+ from uipath_langchain_client.clients.normalized.chat_models import UiPathNormalizedChatModel
+ from uipath_langchain_client.settings import get_default_client_settings
+
+ settings = get_default_client_settings()
+
+ # OpenAI/Azure
+ openai_chat = UiPathAzureChatOpenAI(model="gpt-4o-2024-11-20", client_settings=settings)
+
+ # Google Gemini
+ gemini_chat = UiPathChatGoogleGenerativeAI(model="gemini-2.5-flash", client_settings=settings)
+
+ # Anthropic Claude (via AWS Bedrock)
+ claude_chat = UiPathChatAnthropic(
+     model="anthropic.claude-sonnet-4-5-20250929-v1:0",
+     client_settings=settings,
+     vendor_type="awsbedrock",
+ )
+
+ # Normalized (provider-agnostic)
+ normalized_chat = UiPathNormalizedChatModel(model="gpt-4o-2024-11-20", client_settings=settings)
+ ```
+
+ ## Available Client Types
+
+ ### Passthrough Mode (Default)
+
+ Uses vendor-specific APIs through UiPath's gateway. Full feature parity with native SDKs.
+
+ | Class | Provider | Models |
+ |-------|----------|--------|
+ | `UiPathAzureChatOpenAI` | OpenAI/Azure | GPT-4o, GPT-4, GPT-3.5 |
+ | `UiPathChatOpenAI` | OpenAI | GPT-4o, GPT-4, GPT-3.5 |
+ | `UiPathChatGoogleGenerativeAI` | Google | Gemini 2.5, 2.0, 1.5 |
+ | `UiPathChatAnthropic` | Anthropic | Claude Sonnet 4.5, Opus, etc. |
+ | `UiPathChatAnthropicVertex` | Anthropic (via VertexAI) | Claude models |
+ | `UiPathAzureAIChatCompletionsModel` | Azure AI | Various |
+
+ ### Normalized Mode
+
+ Uses UiPath's normalized API for a consistent interface across all providers.
+
+ | Class | Description |
+ |-------|-------------|
+ | `UiPathNormalizedChatModel` | Provider-agnostic chat completions |
+ | `UiPathNormalizedEmbeddings` | Provider-agnostic embeddings |
+
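+ For example, the same normalized class can be pointed at models from different vendors without changing any other code. A minimal sketch (model names are illustrative; imports and constructor arguments follow the Quick Start above):
+
+ ```python
+ from uipath_langchain_client.clients.normalized.chat_models import UiPathNormalizedChatModel
+ from uipath_langchain_client.settings import get_default_client_settings
+
+ settings = get_default_client_settings()
+
+ # One class, different underlying vendors - only the model name changes
+ for model_name in ("gpt-4o-2024-11-20", "claude-sonnet-4-5", "gemini-2.5-pro"):
+     chat = UiPathNormalizedChatModel(model=model_name, client_settings=settings)
+     print(chat.invoke("Reply with one word: ready?").content)
+ ```
+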
+ ## Features
+
+ ### Streaming
+
+ ```python
+ from uipath_langchain_client import get_chat_model
+ from uipath_langchain_client.settings import get_default_client_settings
+
+ settings = get_default_client_settings()
+ chat_model = get_chat_model(model_name="gpt-4o-2024-11-20", client_settings=settings)
+
+ # Sync streaming
+ for chunk in chat_model.stream("Write a haiku about Python"):
+     print(chunk.content, end="", flush=True)
+
+ # Async streaming
+ async for chunk in chat_model.astream("Write a haiku about Python"):
+     print(chunk.content, end="", flush=True)
+ ```
+
+ ### Tool Calling
+
+ ```python
+ from langchain_core.tools import tool
+
+ @tool
+ def get_weather(city: str) -> str:
+     """Get the current weather for a city."""
+     return f"Sunny, 72°F in {city}"
+
+ chat_model = get_chat_model(model_name="gpt-4o-2024-11-20", client_settings=settings)
+ model_with_tools = chat_model.bind_tools([get_weather])
+
+ response = model_with_tools.invoke("What's the weather in Tokyo?")
+ print(response.tool_calls)
+ ```
+
+ ### LangGraph Agents
+
+ ```python
+ from langgraph.prebuilt import create_react_agent
+ from langchain_core.tools import tool
+
+ @tool
+ def search(query: str) -> str:
+     """Search the web."""
+     return f"Results for: {query}"
+
+ chat_model = get_chat_model(model_name="gpt-4o-2024-11-20", client_settings=settings)
+ agent = create_react_agent(chat_model, [search])
+
+ result = agent.invoke({"messages": [("user", "Search for UiPath documentation")]})
+ ```
+
+ ### Extended Thinking (Model-Specific)
+
+ ```python
+ # OpenAI o1/o3 reasoning
+ chat_model = get_chat_model(
+     model_name="o3-mini",
+     client_settings=settings,
+     client_type="normalized",
+     reasoning_effort="medium",  # "low", "medium", "high"
+ )
+
+ # Anthropic Claude thinking
+ chat_model = get_chat_model(
+     model_name="claude-sonnet-4-5",
+     client_settings=settings,
+     client_type="normalized",
+     thinking={"type": "enabled", "budget_tokens": 10000},
+ )
+
+ # Gemini thinking
+ chat_model = get_chat_model(
+     model_name="gemini-2.5-pro",
+     client_settings=settings,
+     client_type="normalized",
+     thinking_level="medium",
+     include_thoughts=True,
+ )
+ ```
+
+ ## Configuration
+
+ ### Retry Configuration
+
+ ```python
+ # RetryConfig is a TypedDict - all fields are optional with sensible defaults
+ retry_config = {
+     "initial_delay": 2.0,  # Initial delay before first retry
+     "max_delay": 60.0,     # Maximum delay between retries
+     "exp_base": 2.0,       # Exponential backoff base
+     "jitter": 1.0,         # Random jitter to add
+ }
+
+ chat_model = get_chat_model(
+     model_name="gpt-4o-2024-11-20",
+     client_settings=settings,
+     max_retries=3,
+     retry_config=retry_config,
+ )
+ ```
+
+ ### Request Timeout
+
+ ```python
+ chat_model = get_chat_model(
+     model_name="gpt-4o-2024-11-20",
+     client_settings=settings,
+     request_timeout=120,  # Client-side timeout in seconds
+ )
+ ```
+
+ ## API Reference
+
+ ### `get_chat_model()`
+
+ Factory function to create a chat model.
+
+ **Parameters:**
+ - `model_name` (str): Name of the model (e.g., "gpt-4o-2024-11-20")
+ - `client_settings` (UiPathBaseSettings): Client settings for authentication
+ - `client_type` (Literal["passthrough", "normalized"]): API mode (default: "passthrough")
+ - `**model_kwargs`: Additional arguments passed to the model constructor
+
+ **Returns:** `BaseChatModel` - A LangChain-compatible chat model
+
+ ### `get_embedding_model()`
+
+ Factory function to create an embeddings model.
+
+ **Parameters:**
+ - `model` (str): Name of the model (e.g., "text-embedding-3-large")
+ - `client_settings` (UiPathBaseSettings): Client settings for authentication
+ - `client_type` (Literal["passthrough", "normalized"]): API mode (default: "passthrough")
+ - `**model_kwargs`: Additional arguments passed to the model constructor
+
+ **Returns:** `Embeddings` - A LangChain-compatible embeddings model
+
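+ A minimal sketch of how extra keyword arguments flow through `**model_kwargs` for both factory functions (the `temperature` and `max_tokens` values here are standard LangChain chat-model arguments used for illustration, not options defined by this package):
+
+ ```python
+ from uipath_langchain_client import get_chat_model, get_embedding_model
+ from uipath_langchain_client.settings import get_default_client_settings
+
+ settings = get_default_client_settings()
+
+ # Extra keyword arguments are forwarded to the underlying model constructor
+ chat_model = get_chat_model(
+     model_name="gpt-4o-2024-11-20",
+     client_settings=settings,
+     temperature=0.2,  # assumed: forwarded via **model_kwargs
+     max_tokens=512,   # assumed: forwarded via **model_kwargs
+ )
+
+ embeddings = get_embedding_model(
+     model="text-embedding-3-large",
+     client_settings=settings,
+ )
+ ```
+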
+ ## See Also
+
+ - [Main README](../../README.md) - Overview and core client documentation
+ - [UiPath LLM Client](../../src/uipath_llm_client/) - Low-level HTTP client