langchain-timbr 2.0.2__tar.gz → 2.0.3__tar.gz

This diff shows the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.
Files changed (54)
  1. {langchain_timbr-2.0.2 → langchain_timbr-2.0.3}/PKG-INFO +13 -7
  2. {langchain_timbr-2.0.2 → langchain_timbr-2.0.3}/README.md +4 -2
  3. {langchain_timbr-2.0.2 → langchain_timbr-2.0.3}/pyproject.toml +7 -5
  4. {langchain_timbr-2.0.2 → langchain_timbr-2.0.3}/requirements.txt +3 -2
  5. {langchain_timbr-2.0.2 → langchain_timbr-2.0.3}/src/langchain_timbr/_version.py +2 -2
  6. {langchain_timbr-2.0.2 → langchain_timbr-2.0.3}/src/langchain_timbr/llm_wrapper/llm_wrapper.py +94 -8
  7. {langchain_timbr-2.0.2 → langchain_timbr-2.0.3}/tests/standard/test_llm_wrapper_optional_params.py +2 -1
  8. {langchain_timbr-2.0.2 → langchain_timbr-2.0.3}/.github/dependabot.yml +0 -0
  9. {langchain_timbr-2.0.2 → langchain_timbr-2.0.3}/.github/pull_request_template.md +0 -0
  10. {langchain_timbr-2.0.2 → langchain_timbr-2.0.3}/.github/workflows/_codespell.yml +0 -0
  11. {langchain_timbr-2.0.2 → langchain_timbr-2.0.3}/.github/workflows/_fossa.yml +0 -0
  12. {langchain_timbr-2.0.2 → langchain_timbr-2.0.3}/.github/workflows/install-dependencies-and-run-tests.yml +0 -0
  13. {langchain_timbr-2.0.2 → langchain_timbr-2.0.3}/.github/workflows/publish.yml +0 -0
  14. {langchain_timbr-2.0.2 → langchain_timbr-2.0.3}/.gitignore +0 -0
  15. {langchain_timbr-2.0.2 → langchain_timbr-2.0.3}/LICENSE +0 -0
  16. {langchain_timbr-2.0.2 → langchain_timbr-2.0.3}/pytest.ini +0 -0
  17. {langchain_timbr-2.0.2 → langchain_timbr-2.0.3}/src/langchain_timbr/__init__.py +0 -0
  18. {langchain_timbr-2.0.2 → langchain_timbr-2.0.3}/src/langchain_timbr/config.py +0 -0
  19. {langchain_timbr-2.0.2 → langchain_timbr-2.0.3}/src/langchain_timbr/langchain/__init__.py +0 -0
  20. {langchain_timbr-2.0.2 → langchain_timbr-2.0.3}/src/langchain_timbr/langchain/execute_timbr_query_chain.py +0 -0
  21. {langchain_timbr-2.0.2 → langchain_timbr-2.0.3}/src/langchain_timbr/langchain/generate_answer_chain.py +0 -0
  22. {langchain_timbr-2.0.2 → langchain_timbr-2.0.3}/src/langchain_timbr/langchain/generate_timbr_sql_chain.py +0 -0
  23. {langchain_timbr-2.0.2 → langchain_timbr-2.0.3}/src/langchain_timbr/langchain/identify_concept_chain.py +0 -0
  24. {langchain_timbr-2.0.2 → langchain_timbr-2.0.3}/src/langchain_timbr/langchain/timbr_sql_agent.py +0 -0
  25. {langchain_timbr-2.0.2 → langchain_timbr-2.0.3}/src/langchain_timbr/langchain/validate_timbr_sql_chain.py +0 -0
  26. {langchain_timbr-2.0.2 → langchain_timbr-2.0.3}/src/langchain_timbr/langgraph/__init__.py +0 -0
  27. {langchain_timbr-2.0.2 → langchain_timbr-2.0.3}/src/langchain_timbr/langgraph/execute_timbr_query_node.py +0 -0
  28. {langchain_timbr-2.0.2 → langchain_timbr-2.0.3}/src/langchain_timbr/langgraph/generate_response_node.py +0 -0
  29. {langchain_timbr-2.0.2 → langchain_timbr-2.0.3}/src/langchain_timbr/langgraph/generate_timbr_sql_node.py +0 -0
  30. {langchain_timbr-2.0.2 → langchain_timbr-2.0.3}/src/langchain_timbr/langgraph/identify_concept_node.py +0 -0
  31. {langchain_timbr-2.0.2 → langchain_timbr-2.0.3}/src/langchain_timbr/langgraph/validate_timbr_query_node.py +0 -0
  32. {langchain_timbr-2.0.2 → langchain_timbr-2.0.3}/src/langchain_timbr/llm_wrapper/timbr_llm_wrapper.py +0 -0
  33. {langchain_timbr-2.0.2 → langchain_timbr-2.0.3}/src/langchain_timbr/timbr_llm_connector.py +0 -0
  34. {langchain_timbr-2.0.2 → langchain_timbr-2.0.3}/src/langchain_timbr/utils/general.py +0 -0
  35. {langchain_timbr-2.0.2 → langchain_timbr-2.0.3}/src/langchain_timbr/utils/prompt_service.py +0 -0
  36. {langchain_timbr-2.0.2 → langchain_timbr-2.0.3}/src/langchain_timbr/utils/temperature_supported_models.json +0 -0
  37. {langchain_timbr-2.0.2 → langchain_timbr-2.0.3}/src/langchain_timbr/utils/timbr_llm_utils.py +0 -0
  38. {langchain_timbr-2.0.2 → langchain_timbr-2.0.3}/src/langchain_timbr/utils/timbr_utils.py +0 -0
  39. {langchain_timbr-2.0.2 → langchain_timbr-2.0.3}/tests/README.md +0 -0
  40. {langchain_timbr-2.0.2 → langchain_timbr-2.0.3}/tests/conftest.py +0 -0
  41. {langchain_timbr-2.0.2 → langchain_timbr-2.0.3}/tests/integration/test_agent_integration.py +0 -0
  42. {langchain_timbr-2.0.2 → langchain_timbr-2.0.3}/tests/integration/test_azure_databricks_provider.py +0 -0
  43. {langchain_timbr-2.0.2 → langchain_timbr-2.0.3}/tests/integration/test_azure_openai_model.py +0 -0
  44. {langchain_timbr-2.0.2 → langchain_timbr-2.0.3}/tests/integration/test_chain_pipeline.py +0 -0
  45. {langchain_timbr-2.0.2 → langchain_timbr-2.0.3}/tests/integration/test_jwt_token.py +0 -0
  46. {langchain_timbr-2.0.2 → langchain_timbr-2.0.3}/tests/integration/test_langchain_chains.py +0 -0
  47. {langchain_timbr-2.0.2 → langchain_timbr-2.0.3}/tests/integration/test_langgraph_nodes.py +0 -0
  48. {langchain_timbr-2.0.2 → langchain_timbr-2.0.3}/tests/integration/test_timeout_functionality.py +0 -0
  49. {langchain_timbr-2.0.2 → langchain_timbr-2.0.3}/tests/standard/conftest.py +0 -0
  50. {langchain_timbr-2.0.2 → langchain_timbr-2.0.3}/tests/standard/test_chain_documentation.py +0 -0
  51. {langchain_timbr-2.0.2 → langchain_timbr-2.0.3}/tests/standard/test_connection_validation.py +0 -0
  52. {langchain_timbr-2.0.2 → langchain_timbr-2.0.3}/tests/standard/test_optional_llm_integration.py +0 -0
  53. {langchain_timbr-2.0.2 → langchain_timbr-2.0.3}/tests/standard/test_standard_chain_requirements.py +0 -0
  54. {langchain_timbr-2.0.2 → langchain_timbr-2.0.3}/tests/standard/test_unit_tests.py +0 -0

{langchain_timbr-2.0.2 → langchain_timbr-2.0.3}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: langchain-timbr
-Version: 2.0.2
+Version: 2.0.3
 Summary: LangChain & LangGraph extensions that parse LLM prompts into Timbr semantic SQL and execute them.
 Project-URL: Homepage, https://github.com/WPSemantix/langchain-timbr
 Project-URL: Documentation, https://docs.timbr.ai/doc/docs/integration/langchain-sdk/
@@ -31,17 +31,18 @@ Requires-Dist: tiktoken==0.8.0
 Requires-Dist: transformers>=4.53
 Provides-Extra: all
 Requires-Dist: anthropic==0.42.0; extra == 'all'
-Requires-Dist: azure-identity==1.16.1; extra == 'all'
+Requires-Dist: azure-identity>=1.16.1; extra == 'all'
 Requires-Dist: databricks-langchain==0.3.0; (python_version < '3.10') and extra == 'all'
 Requires-Dist: databricks-langchain==0.7.1; (python_version >= '3.10') and extra == 'all'
 Requires-Dist: databricks-sdk==0.64.0; extra == 'all'
 Requires-Dist: google-generativeai==0.8.4; extra == 'all'
 Requires-Dist: langchain-anthropic>=0.3.1; extra == 'all'
 Requires-Dist: langchain-google-genai>=2.0.9; extra == 'all'
+Requires-Dist: langchain-google-vertexai>=2.0.28; extra == 'all'
 Requires-Dist: langchain-openai>=0.3.16; extra == 'all'
 Requires-Dist: langchain-tests>=0.3.20; extra == 'all'
 Requires-Dist: openai>=1.77.0; extra == 'all'
-Requires-Dist: pyarrow<19.0.0; extra == 'all'
+Requires-Dist: pyarrow<20.0.0,>=19.0.1; extra == 'all'
 Requires-Dist: pytest==8.3.4; extra == 'all'
 Requires-Dist: snowflake-snowpark-python>=1.6.0; extra == 'all'
 Requires-Dist: snowflake>=0.8.0; extra == 'all'
@@ -50,7 +51,7 @@ Provides-Extra: anthropic
 Requires-Dist: anthropic==0.42.0; extra == 'anthropic'
 Requires-Dist: langchain-anthropic>=0.3.1; extra == 'anthropic'
 Provides-Extra: azure-openai
-Requires-Dist: azure-identity==1.16.1; extra == 'azure-openai'
+Requires-Dist: azure-identity>=1.16.1; extra == 'azure-openai'
 Requires-Dist: langchain-openai>=0.3.16; extra == 'azure-openai'
 Requires-Dist: openai>=1.77.0; extra == 'azure-openai'
 Provides-Extra: databricks
@@ -59,7 +60,7 @@ Requires-Dist: databricks-langchain==0.7.1; (python_version >= '3.10') and extra
 Requires-Dist: databricks-sdk==0.64.0; extra == 'databricks'
 Provides-Extra: dev
 Requires-Dist: langchain-tests>=0.3.20; extra == 'dev'
-Requires-Dist: pyarrow<19.0.0; extra == 'dev'
+Requires-Dist: pyarrow<20.0.0,>=19.0.1; extra == 'dev'
 Requires-Dist: pytest==8.3.4; extra == 'dev'
 Requires-Dist: uvicorn==0.34.0; extra == 'dev'
 Provides-Extra: google
@@ -71,6 +72,9 @@ Requires-Dist: openai>=1.77.0; extra == 'openai'
 Provides-Extra: snowflake
 Requires-Dist: snowflake-snowpark-python>=1.6.0; extra == 'snowflake'
 Requires-Dist: snowflake>=0.8.0; extra == 'snowflake'
+Provides-Extra: vertex-ai
+Requires-Dist: google-generativeai==0.8.4; extra == 'vertex-ai'
+Requires-Dist: langchain-google-vertexai>=2.0.28; extra == 'vertex-ai'
 Description-Content-Type: text/markdown
 
 ![Timbr logo description](https://timbr.ai/wp-content/uploads/2025/01/logotimbrai230125.png)
@@ -89,8 +93,8 @@ Timbr LangChain LLM SDK is a Python SDK that extends LangChain and LangGraph wit
 
 ![Timbr LangGraph pipeline](https://docs.timbr.ai/doc/assets/images/timbr-langgraph-fcf8e2eb7e26dc9dfa8b56b62937281e.png)
 
-
 ## Dependencies
+
 - Access to a timbr-server
 - Python 3.9.13 or newer
 
@@ -103,13 +107,15 @@ python -m pip install langchain-timbr
 ```
 
 ### Install with selected LLM providers
-#### One of: openai, anthropic, google, azure_openai, snowflake, databricks (or 'all')
+
+#### One of: openai, anthropic, google, azure_openai, snowflake, databricks, vertex_ai (or 'all')
 
 ```bash
 python -m pip install 'langchain-timbr[<your selected providers, separated by comma w/o space>]'
 ```
 
 ### Using pip from github
+
 ```bash
 pip install git+https://github.com/WPSemantix/langchain-timbr
 ```

{langchain_timbr-2.0.2 → langchain_timbr-2.0.3}/README.md
@@ -14,8 +14,8 @@ Timbr LangChain LLM SDK is a Python SDK that extends LangChain and LangGraph wit
 
 ![Timbr LangGraph pipeline](https://docs.timbr.ai/doc/assets/images/timbr-langgraph-fcf8e2eb7e26dc9dfa8b56b62937281e.png)
 
-
 ## Dependencies
+
 - Access to a timbr-server
 - Python 3.9.13 or newer
 
@@ -28,13 +28,15 @@ python -m pip install langchain-timbr
 ```
 
 ### Install with selected LLM providers
-#### One of: openai, anthropic, google, azure_openai, snowflake, databricks (or 'all')
+
+#### One of: openai, anthropic, google, azure_openai, snowflake, databricks, vertex_ai (or 'all')
 
 ```bash
 python -m pip install 'langchain-timbr[<your selected providers, separated by comma w/o space>]'
 ```
 
 ### Using pip from github
+
 ```bash
 pip install git+https://github.com/WPSemantix/langchain-timbr
 ```

{langchain_timbr-2.0.2 → langchain_timbr-2.0.3}/pyproject.toml
@@ -32,15 +32,16 @@ dependencies = [
     "pydantic==2.10.4",
     "pytimbr-api>=2.0.0",
     "tiktoken==0.8.0",
-    "transformers>=4.53"
+    "transformers>=4.53",
 ]
 
 [project.optional-dependencies]
 # LLM providers
 openai = ["openai>=1.77.0", "langchain-openai>=0.3.16"]
-azure_openai = ["azure-identity==1.16.1", "openai>=1.77.0", "langchain-openai>=0.3.16"]
+azure_openai = ["azure-identity>=1.16.1", "openai>=1.77.0", "langchain-openai>=0.3.16"]
 anthropic = ["anthropic==0.42.0", "langchain-anthropic>=0.3.1"]
 google = ["langchain-google-genai>=2.0.9", "google-generativeai==0.8.4"]
+vertex_ai = ["langchain-google-vertexai>=2.0.28", "google-generativeai==0.8.4"]
 snowflake = ["snowflake>=0.8.0", "snowflake-snowpark-python>=1.6.0"]
 databricks = [
     "databricks-langchain==0.3.0; python_version < '3.10'",
@@ -52,24 +53,25 @@ databricks = [
 dev = [
     "pytest==8.3.4",
     "langchain-tests>=0.3.20",
-    "pyarrow<19.0.0",
+    "pyarrow>=19.0.1,<20.0.0",
     "uvicorn==0.34.0"
 ]
 
 # All optional dependencies
 all = [
     "anthropic==0.42.0",
-    "azure-identity==1.16.1",
+    "azure-identity>=1.16.1",
     "google-generativeai==0.8.4",
     "langchain-anthropic>=0.3.1",
     "openai>=1.77.0",
     "langchain-openai>=0.3.16",
     "langchain-google-genai>=2.0.9",
+    "langchain-google-vertexai>=2.0.28",
     "snowflake>=0.8.0",
    "snowflake-snowpark-python>=1.6.0",
     "pytest==8.3.4",
     "langchain-tests>=0.3.20",
-    "pyarrow<19.0.0",
+    "pyarrow>=19.0.1,<20.0.0",
     "uvicorn==0.34.0",
     "databricks-langchain==0.3.0; python_version < '3.10'",
     "databricks-langchain==0.7.1; python_version >= '3.10'",

{langchain_timbr-2.0.2 → langchain_timbr-2.0.3}/requirements.txt
@@ -1,5 +1,5 @@
 anthropic==0.42.0
-azure-identity==1.16.1
+azure-identity>=1.16.1
 cryptography>=44.0.3
 databricks-langchain==0.3.0; python_version < '3.10'
 databricks-langchain==0.7.1; python_version >= '3.10'
@@ -10,11 +10,12 @@ langchain-anthropic>=0.3.1
 langchain_community>=0.3.20
 langchain-core>=0.3.58
 langchain-google-genai>=2.0.9
+langchain-google-vertexai>=2.0.28
 langchain-openai>=0.3.16
 langchain-tests>=0.3.20
 langgraph>=0.3.20
 openai>=1.77.0
-pyarrow<19.0.0
+pyarrow>=19.0.1,<20.0.0
 pydantic==2.10.4
 pytest==8.3.4
 pytimbr-api>=2.0.0

{langchain_timbr-2.0.2 → langchain_timbr-2.0.3}/src/langchain_timbr/_version.py
@@ -28,7 +28,7 @@ version_tuple: VERSION_TUPLE
 commit_id: COMMIT_ID
 __commit_id__: COMMIT_ID
 
-__version__ = version = '2.0.2'
-__version_tuple__ = version_tuple = (2, 0, 2)
+__version__ = version = '2.0.3'
+__version_tuple__ = version_tuple = (2, 0, 3)
 
 __commit_id__ = commit_id = None

{langchain_timbr-2.0.2 → langchain_timbr-2.0.3}/src/langchain_timbr/llm_wrapper/llm_wrapper.py
@@ -14,6 +14,7 @@ class LlmTypes(Enum):
     AzureOpenAI = 'azure-openai-chat'
     Snowflake = 'snowflake-cortex'
     Databricks = 'chat-databricks'
+    VertexAI = 'chat-vertexai'
     Timbr = 'timbr'
 
 
@@ -54,10 +55,7 @@ class LlmWrapper(LLM):
         # Validation: Ensure we have the required parameters
         if not selected_llm_type:
             raise ValueError("llm_type must be provided either as parameter or in config (LLM_TYPE environment variable)")
-
-        if not selected_api_key:
-            raise ValueError("api_key must be provided either as parameter or in config (LLM_API_KEY environment variable)")
-
+
         self.client = self._connect_to_llm(
             selected_llm_type,
             selected_api_key,
@@ -80,8 +78,75 @@ class LlmWrapper(LLM):
             llm_params["temperature"] = config.llm_temperature
         return llm_params
 
-
-    def _connect_to_llm(self, llm_type, api_key, model, **llm_params):
+    def _try_build_vertexai_credentials(self,params, api_key):
+        from google.oauth2 import service_account
+        from google.auth import default
+
+        # Try multiple authentication methods in order of preference
+        creds = None
+        scope = pop_param_value(params, ['vertex_scope', 'llm_scope', 'scope'], default=config.llm_scope)
+        scopes = [scope] if scope else ["https://www.googleapis.com/auth/cloud-platform"]
+
+        # Method 1: Service Account File (json_path)
+        json_path = pop_param_value(params, ['azure_json_path', 'llm_json_path', 'json_path'])
+        if json_path:
+            try:
+                creds = service_account.Credentials.from_service_account_file(
+                    json_path,
+                    scopes=scopes,
+                )
+            except Exception as e:
+                raise ValueError(f"Failed to load service account from file '{json_path}': {e}")
+
+        # Method 2: Service Account Info (as dictionary)
+        if not creds:
+            service_account_info = pop_param_value(params, ['service_account_info', 'vertex_service_account_info'])
+            if service_account_info:
+                try:
+                    creds = service_account.Credentials.from_service_account_info(
+                        service_account_info,
+                        scopes=scopes,
+                    )
+                except Exception as e:
+                    raise ValueError(f"Failed to load service account from info: {e}")
+
+        # Method 3: Service Account Email + Private Key
+        if not creds:
+            service_account_email = pop_param_value(params, ['service_account_email', 'vertex_email', 'service_account'])
+            private_key = pop_param_value(params, ['private_key', 'vertex_private_key']) or api_key
+
+            if service_account_email and private_key:
+                try:
+                    service_account_info = {
+                        "type": "service_account",
+                        "client_email": service_account_email,
+                        "private_key": private_key,
+                        "token_uri": "https://oauth2.googleapis.com/token",
+                    }
+                    creds = service_account.Credentials.from_service_account_info(
+                        service_account_info,
+                        scopes=scopes,
+                    )
+                except Exception as e:
+                    raise ValueError(f"Failed to create service account from email and private key: {e}")
+
+        # Method 4: Default Google Cloud Credentials (fallback)
+        if not creds:
+            try:
+                creds, _ = default(scopes=scopes)
+            except Exception as e:
+                raise ValueError(
+                    "VertexAI authentication failed. Please provide one of:\n"
+                    "1. 'json_path' - path to service account JSON file\n"
+                    "2. 'service_account_info' - service account info as dictionary\n"
+                    "3. 'service_account_email' + 'private_key' - service account credentials\n"
+                    "4. Set up default Google Cloud credentials (gcloud auth application-default login)\n"
+                    f"Error: {e}"
+                )
+
+        return creds
+
+    def _connect_to_llm(self, llm_type, api_key = None, model = None, **llm_params):
         if is_llm_type(llm_type, LlmTypes.OpenAI):
             from langchain_openai import ChatOpenAI as OpenAI
             llm_model = model or "gpt-4o-2024-11-20"
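
The last hunk above adds `_try_build_vertexai_credentials`, which resolves Google credentials in a fixed order: a service-account JSON file, a service-account info dict, a service-account email plus private key (with `api_key` doubling as the private key), and finally Application Default Credentials. A minimal caller-side sketch of those options follows. The credential keyword names are the aliases the helper pops from its params; everything else (the import path, the `llm_type` spelling, the `model` keyword, and the assumption that `LlmWrapper` forwards extra keyword arguments into the helper) is inferred, not confirmed by this diff.

```python
# Hedged sketch of the credential options added in 2.0.3. The keyword
# names ('json_path', 'service_account_info', 'service_account_email',
# 'private_key', 'scope') come from the pop_param_value aliases in the
# hunk above; the import path, the 'llm_type' spelling, the 'model'
# keyword, and the pass-through of extra kwargs are assumptions.
from langchain_timbr import LlmWrapper

# Method 1: service-account JSON file on disk (hypothetical path)
vertex_llm = LlmWrapper(
    llm_type="vertex-ai",
    model="gemini-2.5-flash-lite",
    json_path="/secrets/vertex-sa.json",
)

# Method 3: service-account email + private key
vertex_llm_from_key = LlmWrapper(
    llm_type="vertex-ai",
    model="gemini-2.5-flash-lite",
    service_account_email="svc@my-project.iam.gserviceaccount.com",  # hypothetical
    private_key="-----BEGIN PRIVATE KEY-----\n...\n-----END PRIVATE KEY-----\n",
)

# Method 4: pass no credential kwargs at all to fall back to Application
# Default Credentials (e.g. after `gcloud auth application-default login`).
vertex_llm_adc = LlmWrapper(llm_type="vertex-ai", model="gemini-2.5-flash-lite")
```

Because the private key falls back to `api_key`, an existing `api_key` argument can carry the service-account key when only the email is supplied.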

@@ -171,6 +236,21 @@ class LlmWrapper(LLM):
                 endpoint=llm_model,
                 workspace_client=w, # Using authenticated client
                 **params,
+            )
+        elif is_llm_type(llm_type, LlmTypes.VertexAI):
+            from langchain_google_vertexai import ChatVertexAI
+            llm_model = model or "gemini-2.5-flash-lite"
+            params = self._add_temperature(LlmTypes.VertexAI.name, llm_model, **llm_params)
+
+            project = pop_param_value(params, ['vertex_project', 'llm_project', 'project'])
+            if project:
+                params['project'] = project
+
+            creds = self._try_build_vertexai_credentials(params, api_key)
+            return ChatVertexAI(
+                model_name=llm_model,
+                credentials=creds,
+                **params,
             )
         else:
             raise ValueError(f"Unsupported LLM type: {llm_type}")
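
Continuing in llm_wrapper.py, the hunk above wires the new `VertexAI` type into `_connect_to_llm`: it picks a default model, pops an optional project, builds credentials, and returns a `ChatVertexAI` instance. For reference, here is a standalone sketch of the equivalent direct construction with `langchain-google-vertexai`; the project ID and service-account path are placeholders, and only the default model name comes from the diff.

```python
# Hedged sketch: roughly what the new VertexAI branch constructs, built
# directly with langchain-google-vertexai. The project ID and the
# service-account path are placeholders; "gemini-2.5-flash-lite" is the
# default model named in the diff.
from google.oauth2 import service_account
from langchain_google_vertexai import ChatVertexAI

creds = service_account.Credentials.from_service_account_file(
    "/secrets/vertex-sa.json",  # hypothetical service-account file
    scopes=["https://www.googleapis.com/auth/cloud-platform"],
)

chat = ChatVertexAI(
    model_name="gemini-2.5-flash-lite",
    credentials=creds,
    project="my-gcp-project",  # hypothetical GCP project ID
)

print(chat.invoke("Summarize what a semantic SQL layer does.").content)
```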

@@ -179,6 +259,8 @@ class LlmWrapper(LLM):
     def get_model_list(self) -> list[str]:
         """Return the list of available models for the LLM."""
         models = []
+        llm_type_name = None
+
         try:
             if is_llm_type(self._llm_type, LlmTypes.OpenAI):
                 from openai import OpenAI
@@ -237,10 +319,14 @@ class LlmWrapper(LLM):
                 models = [ep.name for ep in w.serving_endpoints.list()]
 
             # elif self._is_llm_type(self._llm_type, LlmTypes.Timbr):
-
+            elif is_llm_type(self._llm_type, LlmTypes.VertexAI):
+                from google import genai
+                if self.client.credentials:
+                    client = genai.Client(credentials=self.client.credentials, vertexai=True, project=self.client.project, location=self.client.location)
+                    models = [m.name.split('/')[-1] for m in client.models.list()]
+
         except Exception:
             # If model list fetching throws an exception, return default value using get_supported_models
-            llm_type_name = None
             if hasattr(self, '_llm_type'):
                 # Try to extract the LLM type name from the _llm_type
                 for llm_enum in LlmTypes:
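
The final llm_wrapper.py hunk teaches `get_model_list` to enumerate Vertex AI models through the google-genai client in Vertex mode and, as before, to fall back to `get_supported_models` defaults if listing fails. A standalone sketch of that listing step, assuming a placeholder project and location and Application Default Credentials rather than the wrapper's stored client:

```python
# Hedged sketch of the model-listing step used by the new VertexAI branch
# of get_model_list, using the google-genai SDK directly. Project and
# location are placeholders; credentials come from Application Default
# Credentials here rather than from self.client.credentials.
from google import genai

client = genai.Client(vertexai=True, project="my-gcp-project", location="us-central1")

# Mirror the diff: keep only the short identifier after the last '/'.
model_names = [m.name.split("/")[-1] for m in client.models.list()]
print(model_names[:5])
```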

{langchain_timbr-2.0.2 → langchain_timbr-2.0.3}/tests/standard/test_llm_wrapper_optional_params.py
@@ -47,7 +47,8 @@ class TestLlmWrapperOptionalParams:
         with pytest.raises(ValueError, match="llm_type must be provided"):
             LlmWrapper(api_key="test-key")
 
-    def test_missing_api_key_raises_error(self):
+    # This test is deprecated because api_key is now mandatory
+    def skip_test_missing_api_key_raises_error(self):
         """Test that missing api_key raises appropriate error"""
         with patch.dict(os.environ, {}, clear=True):
             # Mock the config values to ensure they're None