langchain-timbr 2.0.2__py3-none-any.whl → 2.0.4__py3-none-any.whl

This diff compares the contents of publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only.
@@ -28,7 +28,7 @@ version_tuple: VERSION_TUPLE
  commit_id: COMMIT_ID
  __commit_id__: COMMIT_ID
 
- __version__ = version = '2.0.2'
- __version_tuple__ = version_tuple = (2, 0, 2)
+ __version__ = version = '2.0.4'
+ __version_tuple__ = version_tuple = (2, 0, 4)
 
  __commit_id__ = commit_id = None
@@ -14,6 +14,7 @@ class LlmTypes(Enum):
      AzureOpenAI = 'azure-openai-chat'
      Snowflake = 'snowflake-cortex'
      Databricks = 'chat-databricks'
+     VertexAI = 'chat-vertexai'
      Timbr = 'timbr'
 
 
@@ -54,10 +55,7 @@ class LlmWrapper(LLM):
          # Validation: Ensure we have the required parameters
          if not selected_llm_type:
              raise ValueError("llm_type must be provided either as parameter or in config (LLM_TYPE environment variable)")
-
-         if not selected_api_key:
-             raise ValueError("api_key must be provided either as parameter or in config (LLM_API_KEY environment variable)")
-
+
          self.client = self._connect_to_llm(
              selected_llm_type,
              selected_api_key,
@@ -80,8 +78,75 @@ class LlmWrapper(LLM):
          llm_params["temperature"] = config.llm_temperature
          return llm_params
 
-
-     def _connect_to_llm(self, llm_type, api_key, model, **llm_params):
+     def _try_build_vertexai_credentials(self,params, api_key):
+         from google.oauth2 import service_account
+         from google.auth import default
+
+         # Try multiple authentication methods in order of preference
+         creds = None
+         scope = pop_param_value(params, ['vertex_scope', 'llm_scope', 'scope'], default=config.llm_scope)
+         scopes = [scope] if scope else ["https://www.googleapis.com/auth/cloud-platform"]
+
+         # Method 1: Service Account File (json_path)
+         json_path = pop_param_value(params, ['azure_json_path', 'llm_json_path', 'json_path'])
+         if json_path:
+             try:
+                 creds = service_account.Credentials.from_service_account_file(
+                     json_path,
+                     scopes=scopes,
+                 )
+             except Exception as e:
+                 raise ValueError(f"Failed to load service account from file '{json_path}': {e}")
+
+         # Method 2: Service Account Info (as dictionary)
+         if not creds:
+             service_account_info = pop_param_value(params, ['service_account_info', 'vertex_service_account_info'])
+             if service_account_info:
+                 try:
+                     creds = service_account.Credentials.from_service_account_info(
+                         service_account_info,
+                         scopes=scopes,
+                     )
+                 except Exception as e:
+                     raise ValueError(f"Failed to load service account from info: {e}")
+
+         # Method 3: Service Account Email + Private Key
+         if not creds:
+             service_account_email = pop_param_value(params, ['service_account_email', 'vertex_email', 'service_account'])
+             private_key = pop_param_value(params, ['private_key', 'vertex_private_key']) or api_key
+
+             if service_account_email and private_key:
+                 try:
+                     service_account_info = {
+                         "type": "service_account",
+                         "client_email": service_account_email,
+                         "private_key": private_key,
+                         "token_uri": "https://oauth2.googleapis.com/token",
+                     }
+                     creds = service_account.Credentials.from_service_account_info(
+                         service_account_info,
+                         scopes=scopes,
+                     )
+                 except Exception as e:
+                     raise ValueError(f"Failed to create service account from email and private key: {e}")
+
+         # Method 4: Default Google Cloud Credentials (fallback)
+         if not creds:
+             try:
+                 creds, _ = default(scopes=scopes)
+             except Exception as e:
+                 raise ValueError(
+                     "VertexAI authentication failed. Please provide one of:\n"
+                     "1. 'json_path' - path to service account JSON file\n"
+                     "2. 'service_account_info' - service account info as dictionary\n"
+                     "3. 'service_account_email' + 'private_key' - service account credentials\n"
+                     "4. Set up default Google Cloud credentials (gcloud auth application-default login)\n"
+                     f"Error: {e}"
+                 )
+
+         return creds
+
+     def _connect_to_llm(self, llm_type, api_key = None, model = None, **llm_params):
          if is_llm_type(llm_type, LlmTypes.OpenAI):
              from langchain_openai import ChatOpenAI as OpenAI
              llm_model = model or "gpt-4o-2024-11-20"
@@ -171,6 +236,21 @@ class LlmWrapper(LLM):
                  endpoint=llm_model,
                  workspace_client=w, # Using authenticated client
                  **params,
+             )
+         elif is_llm_type(llm_type, LlmTypes.VertexAI):
+             from langchain_google_vertexai import ChatVertexAI
+             llm_model = model or "gemini-2.5-flash-lite"
+             params = self._add_temperature(LlmTypes.VertexAI.name, llm_model, **llm_params)
+
+             project = pop_param_value(params, ['vertex_project', 'llm_project', 'project'])
+             if project:
+                 params['project'] = project
+
+             creds = self._try_build_vertexai_credentials(params, api_key)
+             return ChatVertexAI(
+                 model_name=llm_model,
+                 credentials=creds,
+                 **params,
              )
          else:
              raise ValueError(f"Unsupported LLM type: {llm_type}")
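To make the new Vertex AI path easier to follow, here is a minimal sketch of the same flow outside the wrapper, assuming a service-account JSON file and the `langchain_google_vertexai` package; the file path and project ID are placeholders, and the model name is simply the default used by the new branch.

```python
# Hedged sketch: explicit service-account credentials plus a ChatVertexAI client,
# mirroring the json_path branch of _try_build_vertexai_credentials above.
from google.oauth2 import service_account
from langchain_google_vertexai import ChatVertexAI

creds = service_account.Credentials.from_service_account_file(
    "/path/to/service-account.json",  # placeholder path
    scopes=["https://www.googleapis.com/auth/cloud-platform"],
)

llm = ChatVertexAI(
    model_name="gemini-2.5-flash-lite",  # default model in the new VertexAI branch
    credentials=creds,
    project="my-gcp-project",  # placeholder project ID
)
print(llm.invoke("Hello from Timbr!").content)
```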
@@ -179,6 +259,8 @@ class LlmWrapper(LLM):
      def get_model_list(self) -> list[str]:
          """Return the list of available models for the LLM."""
          models = []
+         llm_type_name = None
+
          try:
              if is_llm_type(self._llm_type, LlmTypes.OpenAI):
                  from openai import OpenAI
@@ -237,10 +319,14 @@ class LlmWrapper(LLM):
                  models = [ep.name for ep in w.serving_endpoints.list()]
 
              # elif self._is_llm_type(self._llm_type, LlmTypes.Timbr):
-
+             elif is_llm_type(self._llm_type, LlmTypes.VertexAI):
+                 from google import genai
+                 if self.client.credentials:
+                     client = genai.Client(credentials=self.client.credentials, vertexai=True, project=self.client.project, location=self.client.location)
+                     models = [m.name.split('/')[-1] for m in client.models.list()]
+
          except Exception:
              # If model list fetching throws an exception, return default value using get_supported_models
-             llm_type_name = None
              if hasattr(self, '_llm_type'):
                  # Try to extract the LLM type name from the _llm_type
                  for llm_enum in LlmTypes:
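For reference, a hedged sketch of the model-listing call this branch relies on, using the `google-genai` client directly; the project and location are placeholders, and application-default credentials are assumed instead of the wrapper's stored credentials.

```python
# Hedged sketch: list Vertex AI model names with the google-genai SDK,
# the same call the new get_model_list branch makes via the wrapped client.
from google import genai

client = genai.Client(vertexai=True, project="my-gcp-project", location="us-central1")
models = [m.name.split("/")[-1] for m in client.models.list()]
print(models[:5])
```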
@@ -138,29 +138,30 @@ def _prompt_to_string(prompt: Any) -> str:
  def _calculate_token_count(llm: LLM, prompt: str) -> int:
      """
      Calculate the token count for a given prompt text using the specified LLM.
-     Falls back to tiktoken if the LLM doesn't support token counting.
+     Falls back to basic if the LLM doesn't support token counting.
      """
+     import tiktoken
      token_count = 0
+
+     encoding = None
      try:
-         if hasattr(llm, "get_num_tokens_from_messages"):
-             token_count = llm.get_num_tokens_from_messages(prompt)
+         if hasattr(llm, 'client') and hasattr(llm.client, 'model_name'):
+             encoding = tiktoken.encoding_for_model(llm.client.model_name)
      except Exception as e:
-         #print(f"Error with primary token counting: {e}")
+         print(f"Error with primary token counting: {e}")
          pass
 
-     # Use tiktoken as fallback if token_count is still 0
-     if token_count == 0:
-         try:
-             import tiktoken
+     try:
+         if encoding is None:
              encoding = tiktoken.get_encoding("cl100k_base")
-             if isinstance(prompt, str):
-                 token_count = len(encoding.encode(prompt))
-             else:
-                 prompt_text = _prompt_to_string(prompt)
-                 token_count = len(encoding.encode(prompt_text))
-         except Exception as e2:
-             #print(f"Error calculating token count with fallback method: {e2}")
-             pass
+         if isinstance(prompt, str):
+             token_count = len(encoding.encode(prompt))
+         else:
+             prompt_text = _prompt_to_string(prompt)
+             token_count = len(encoding.encode(prompt_text))
+     except Exception as e2:
+         #print(f"Error calculating token count with fallback method: {e2}")
+         pass
 
      return token_count
 
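As a standalone illustration of the rewritten helper, a small sketch of the tiktoken pattern it now uses: try a model-specific encoding first, then fall back to `cl100k_base` (the model name here is illustrative):

```python
# Hedged sketch of model-aware token counting with a cl100k_base fallback.
import tiktoken

def count_tokens(text: str, model_name: str | None = None) -> int:
    try:
        # Unknown or missing model names raise and fall through to the generic encoding.
        encoding = tiktoken.encoding_for_model(model_name) if model_name else tiktoken.get_encoding("cl100k_base")
    except Exception:
        encoding = tiktoken.get_encoding("cl100k_base")
    return len(encoding.encode(text))

print(count_tokens("How many tokens is this?", "gpt-4o"))
```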
@@ -0,0 +1,217 @@
+ Metadata-Version: 2.4
+ Name: langchain-timbr
+ Version: 2.0.4
+ Summary: LangChain & LangGraph extensions that parse LLM prompts into Timbr semantic SQL and execute them.
+ Project-URL: Homepage, https://github.com/WPSemantix/langchain-timbr
+ Project-URL: Documentation, https://docs.timbr.ai/doc/docs/integration/langchain-sdk/
+ Project-URL: Source, https://github.com/WPSemantix/langchain-timbr
+ Project-URL: Issues, https://github.com/WPSemantix/langchain-timbr/issues
+ Author-email: "Timbr.ai" <contact@timbr.ai>
+ License: MIT
+ License-File: LICENSE
+ Keywords: Agents,Knowledge Graph,LLM,LangChain,LangGraph,SQL,Semantic Layer,Timbr
+ Classifier: Intended Audience :: Developers
+ Classifier: License :: OSI Approved :: MIT License
+ Classifier: Programming Language :: Python :: 3
+ Classifier: Programming Language :: Python :: 3 :: Only
+ Classifier: Programming Language :: Python :: 3.10
+ Classifier: Programming Language :: Python :: 3.11
+ Classifier: Programming Language :: Python :: 3.12
+ Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
+ Requires-Python: <3.13,>=3.10
+ Requires-Dist: anthropic==0.42.0
+ Requires-Dist: azure-identity==1.25.0; python_version >= '3.11'
+ Requires-Dist: azure-identity>=1.16.1; python_version == '3.10'
+ Requires-Dist: cryptography==45.0.7; python_version >= '3.11'
+ Requires-Dist: cryptography>=44.0.3; python_version == '3.10'
+ Requires-Dist: databricks-langchain==0.7.1
+ Requires-Dist: databricks-sdk==0.64.0
+ Requires-Dist: google-generativeai==0.8.4
+ Requires-Dist: langchain-anthropic==0.3.5; python_version >= '3.11'
+ Requires-Dist: langchain-anthropic>=0.3.1; python_version == '3.10'
+ Requires-Dist: langchain-community==0.3.30; python_version >= '3.11'
+ Requires-Dist: langchain-community>=0.3.20; python_version == '3.10'
+ Requires-Dist: langchain-core==0.3.78; python_version >= '3.11'
+ Requires-Dist: langchain-core>=0.3.58; python_version == '3.10'
+ Requires-Dist: langchain-google-genai==2.0.10; python_version >= '3.11'
+ Requires-Dist: langchain-google-genai>=2.0.9; python_version == '3.10'
+ Requires-Dist: langchain-google-vertexai==2.1.2; python_version >= '3.11'
+ Requires-Dist: langchain-google-vertexai>=2.0.28; python_version == '3.10'
+ Requires-Dist: langchain-openai==0.3.34; python_version >= '3.11'
+ Requires-Dist: langchain-openai>=0.3.16; python_version == '3.10'
+ Requires-Dist: langchain-tests==0.3.22; python_version >= '3.11'
+ Requires-Dist: langchain-tests>=0.3.20; python_version == '3.10'
+ Requires-Dist: langchain==0.3.27; python_version >= '3.11'
+ Requires-Dist: langchain>=0.3.25; python_version == '3.10'
+ Requires-Dist: langgraph==0.6.8; python_version >= '3.11'
+ Requires-Dist: langgraph>=0.3.20; python_version == '3.10'
+ Requires-Dist: numpy==1.26.4; python_version >= '3.11'
+ Requires-Dist: numpy~=1.26.2; python_version == '3.10'
+ Requires-Dist: openai==2.1.0; python_version >= '3.11'
+ Requires-Dist: openai>=1.77.0; python_version == '3.10'
+ Requires-Dist: pyarrow<20.0.0,>=19.0.1; python_version == '3.10'
+ Requires-Dist: pyarrow==19.0.1; python_version >= '3.11'
+ Requires-Dist: pydantic==2.10.4
+ Requires-Dist: pytest==8.3.4
+ Requires-Dist: pytimbr-api==2.0.0; python_version >= '3.11'
+ Requires-Dist: pytimbr-api>=2.0.0; python_version == '3.10'
+ Requires-Dist: snowflake-snowpark-python==1.39.1; python_version >= '3.11'
+ Requires-Dist: snowflake-snowpark-python>=1.39.1; python_version == '3.10'
+ Requires-Dist: snowflake==1.8.0; python_version >= '3.11'
+ Requires-Dist: snowflake>=1.8.0; python_version == '3.10'
+ Requires-Dist: tiktoken==0.8.0
+ Requires-Dist: transformers==4.57.0; python_version >= '3.11'
+ Requires-Dist: transformers>=4.53; python_version == '3.10'
+ Requires-Dist: uvicorn==0.34.0
+ Provides-Extra: all
+ Requires-Dist: anthropic==0.42.0; extra == 'all'
+ Requires-Dist: azure-identity==1.25.0; (python_version >= '3.11') and extra == 'all'
+ Requires-Dist: azure-identity>=1.16.1; (python_version == '3.10') and extra == 'all'
+ Requires-Dist: databricks-langchain==0.7.1; extra == 'all'
+ Requires-Dist: databricks-sdk==0.64.0; extra == 'all'
+ Requires-Dist: google-generativeai==0.8.4; extra == 'all'
+ Requires-Dist: langchain-anthropic==0.3.5; (python_version >= '3.11') and extra == 'all'
+ Requires-Dist: langchain-anthropic>=0.3.1; (python_version == '3.10') and extra == 'all'
+ Requires-Dist: langchain-google-genai==2.0.10; (python_version >= '3.11') and extra == 'all'
+ Requires-Dist: langchain-google-genai>=2.0.9; (python_version == '3.10') and extra == 'all'
+ Requires-Dist: langchain-google-vertexai==2.1.2; (python_version >= '3.11') and extra == 'all'
+ Requires-Dist: langchain-google-vertexai>=2.0.28; (python_version == '3.10') and extra == 'all'
+ Requires-Dist: langchain-openai==0.3.34; (python_version >= '3.11') and extra == 'all'
+ Requires-Dist: langchain-openai>=0.3.16; (python_version == '3.10') and extra == 'all'
+ Requires-Dist: langchain-tests==0.3.22; (python_version >= '3.11') and extra == 'all'
+ Requires-Dist: langchain-tests>=0.3.20; (python_version == '3.10') and extra == 'all'
+ Requires-Dist: openai==2.1.0; (python_version >= '3.11') and extra == 'all'
+ Requires-Dist: openai>=1.77.0; (python_version == '3.10') and extra == 'all'
+ Requires-Dist: pyarrow<20.0.0,>=19.0.1; (python_version == '3.10') and extra == 'all'
+ Requires-Dist: pyarrow==19.0.1; (python_version >= '3.11') and extra == 'all'
+ Requires-Dist: pytest==8.3.4; extra == 'all'
+ Requires-Dist: snowflake-snowpark-python==1.39.1; (python_version >= '3.11') and extra == 'all'
+ Requires-Dist: snowflake-snowpark-python>=1.39.1; (python_version == '3.10') and extra == 'all'
+ Requires-Dist: snowflake==1.8.0; (python_version >= '3.11') and extra == 'all'
+ Requires-Dist: snowflake>=1.8.0; (python_version == '3.10') and extra == 'all'
+ Requires-Dist: uvicorn==0.34.0; extra == 'all'
+ Provides-Extra: anthropic
+ Requires-Dist: anthropic==0.42.0; extra == 'anthropic'
+ Requires-Dist: langchain-anthropic==0.3.5; (python_version >= '3.11') and extra == 'anthropic'
+ Requires-Dist: langchain-anthropic>=0.3.1; (python_version == '3.10') and extra == 'anthropic'
+ Provides-Extra: azure-openai
+ Requires-Dist: azure-identity==1.25.0; (python_version >= '3.11') and extra == 'azure-openai'
+ Requires-Dist: azure-identity>=1.16.1; (python_version == '3.10') and extra == 'azure-openai'
+ Requires-Dist: langchain-openai==0.3.34; (python_version >= '3.11') and extra == 'azure-openai'
+ Requires-Dist: langchain-openai>=0.3.16; (python_version == '3.10') and extra == 'azure-openai'
+ Requires-Dist: openai==2.1.0; (python_version >= '3.11') and extra == 'azure-openai'
+ Requires-Dist: openai>=1.77.0; (python_version == '3.10') and extra == 'azure-openai'
+ Provides-Extra: databricks
+ Requires-Dist: databricks-langchain==0.7.1; extra == 'databricks'
+ Requires-Dist: databricks-sdk==0.64.0; extra == 'databricks'
+ Provides-Extra: dev
+ Requires-Dist: langchain-tests==0.3.22; (python_version >= '3.11') and extra == 'dev'
+ Requires-Dist: langchain-tests>=0.3.20; (python_version == '3.10') and extra == 'dev'
+ Requires-Dist: pyarrow<20.0.0,>=19.0.1; (python_version == '3.10') and extra == 'dev'
+ Requires-Dist: pyarrow==19.0.1; (python_version >= '3.11') and extra == 'dev'
+ Requires-Dist: pytest==8.3.4; extra == 'dev'
+ Requires-Dist: uvicorn==0.34.0; extra == 'dev'
+ Provides-Extra: google
+ Requires-Dist: google-generativeai==0.8.4; extra == 'google'
+ Requires-Dist: langchain-google-genai==2.0.10; (python_version >= '3.11') and extra == 'google'
+ Requires-Dist: langchain-google-genai>=2.0.9; (python_version == '3.10') and extra == 'google'
+ Provides-Extra: openai
+ Requires-Dist: langchain-openai==0.3.34; (python_version >= '3.11') and extra == 'openai'
+ Requires-Dist: langchain-openai>=0.3.16; (python_version == '3.10') and extra == 'openai'
+ Requires-Dist: openai==2.1.0; (python_version >= '3.11') and extra == 'openai'
+ Requires-Dist: openai>=1.77.0; (python_version == '3.10') and extra == 'openai'
+ Provides-Extra: snowflake
+ Requires-Dist: snowflake-snowpark-python==1.39.1; (python_version >= '3.11') and extra == 'snowflake'
+ Requires-Dist: snowflake-snowpark-python>=1.39.1; (python_version == '3.10') and extra == 'snowflake'
+ Requires-Dist: snowflake==1.8.0; (python_version >= '3.11') and extra == 'snowflake'
+ Requires-Dist: snowflake>=1.8.0; (python_version == '3.10') and extra == 'snowflake'
+ Provides-Extra: vertex-ai
+ Requires-Dist: google-generativeai==0.8.4; extra == 'vertex-ai'
+ Requires-Dist: langchain-google-vertexai==2.1.2; (python_version >= '3.11') and extra == 'vertex-ai'
+ Requires-Dist: langchain-google-vertexai>=2.0.28; (python_version == '3.10') and extra == 'vertex-ai'
+ Description-Content-Type: text/markdown
+
+ ![Timbr logo description](https://timbr.ai/wp-content/uploads/2025/01/logotimbrai230125.png)
+
+ [![FOSSA Status](https://app.fossa.com/api/projects/git%2Bgithub.com%2FWPSemantix%2Flangchain-timbr.svg?type=shield&issueType=security)](https://app.fossa.com/projects/git%2Bgithub.com%2FWPSemantix%2Flangchain-timbr?ref=badge_shield&issueType=security)
+ [![FOSSA Status](https://app.fossa.com/api/projects/git%2Bgithub.com%2FWPSemantix%2Flangchain-timbr.svg?type=shield&issueType=license)](https://app.fossa.com/projects/git%2Bgithub.com%2FWPSemantix%2Flangchain-timbr?ref=badge_shield&issueType=license)
+
+
+ [![Python 3.10](https://img.shields.io/badge/python-3.10-blue.svg)](https://www.python.org/downloads/release/python-31017/)
+ [![Python 3.11](https://img.shields.io/badge/python-3.11-blue.svg)](https://www.python.org/downloads/release/python-31112/)
+ [![Python 3.12](https://img.shields.io/badge/python-3.12-blue.svg)](https://www.python.org/downloads/release/python-3129/)
+
+ # Timbr LangChain LLM SDK
+
+ Timbr LangChain LLM SDK is a Python SDK that extends LangChain and LangGraph with custom agents, chains, and nodes for seamless integration with the Timbr semantic layer. It enables converting natural language prompts into optimized semantic-SQL queries and executing them directly against your data.
+
+ ![Timbr LangGraph pipeline](https://docs.timbr.ai/doc/assets/images/timbr-langgraph-fcf8e2eb7e26dc9dfa8b56b62937281e.png)
+
+ ## Dependencies
+
+ - Access to a timbr-server
+ - Python 3.10 or newer
+
+ ## Installation
+
+ ### Using pip
+
+ ```bash
+ python -m pip install langchain-timbr
+ ```
+
+ ### Install with selected LLM providers
+
+ #### One of: openai, anthropic, google, azure_openai, snowflake, databricks, vertex_ai (or 'all')
+
+ ```bash
+ python -m pip install 'langchain-timbr[<your selected providers, separated by comma w/o space>]'
+ ```
+
+ ### Using pip from github
+
+ ```bash
+ pip install git+https://github.com/WPSemantix/langchain-timbr
+ ```
+
+ ## Documentation
+
+ For comprehensive documentation and usage examples, please visit:
+
+ - [Timbr LangChain Documentation](https://docs.timbr.ai/doc/docs/integration/langchain-sdk)
+ - [Timbr LangGraph Documentation](https://docs.timbr.ai/doc/docs/integration/langgraph-sdk)
+
+ ## Configuration
+
+ The SDK uses environment variables for configuration. All configurations are optional - when set, they serve as default values for `langchain-timbr` provided tools. Below are all available configuration options:
+
+ ### Configuration Options
+
+ #### Timbr Connection Settings
+
+ - **`TIMBR_URL`** - The URL of your Timbr server
+ - **`TIMBR_TOKEN`** - Authentication token for accessing the Timbr server
+ - **`TIMBR_ONTOLOGY`** - The ontology to use (also accepts `ONTOLOGY` as an alias)
+ - **`IS_JWT`** - Whether the token is a JWT token (true/false)
+ - **`JWT_TENANT_ID`** - Tenant ID for JWT authentication
+
+ #### Cache and Data Processing
+
+ - **`CACHE_TIMEOUT`** - Timeout for caching operations in seconds
+ - **`IGNORE_TAGS`** - Comma-separated list of tags to ignore during processing
+ - **`IGNORE_TAGS_PREFIX`** - Comma-separated list of tag prefixes to ignore during processing
+
+ #### LLM Configuration
+
+ - **`LLM_TYPE`** - The type of LLM provider to use
+ - **`LLM_MODEL`** - The specific model to use with the LLM provider
+ - **`LLM_API_KEY`** - API key or client secret for the LLM provider
+ - **`LLM_TEMPERATURE`** - Temperature setting for LLM responses (controls randomness)
+ - **`LLM_ADDITIONAL_PARAMS`** - Additional parameters to pass to the LLM
+ - **`LLM_TIMEOUT`** - Timeout for LLM requests in seconds
+ - **`LLM_TENANT_ID`** - LLM provider tenant/directory ID (Used for Service Principal authentication)
+ - **`LLM_CLIENT_ID`** - LLM provider client ID (Used for Service Principal authentication)
+ - **`LLM_CLIENT_SECRET`** - LLM provider client secret (Used for Service Principal authentication)
+ - **`LLM_ENDPOINT`** - LLM provider OpenAI endpoint URL
+ - **`LLM_API_VERSION`** - LLM provider API version
+ - **`LLM_SCOPE`** - LLM provider authentication scope
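To tie the configuration section above to the code changes, a hedged example of setting a few of the documented environment variables from Python; all values are placeholders, and `chat-vertexai` / `gemini-2.5-flash-lite` are simply the identifiers introduced in this release.

```python
# Hedged sketch: these variables are read by langchain-timbr tools as defaults when set.
import os

os.environ["TIMBR_URL"] = "https://timbr.example.com"  # placeholder server URL
os.environ["TIMBR_TOKEN"] = "tk_xxx"                   # placeholder token
os.environ["TIMBR_ONTOLOGY"] = "my_ontology"           # placeholder ontology name
os.environ["LLM_TYPE"] = "chat-vertexai"               # identifier added by the new VertexAI enum member
os.environ["LLM_MODEL"] = "gemini-2.5-flash-lite"      # default model in the new VertexAI branch
# LLM_API_KEY can be left unset here; the 2.0.4 wrapper no longer requires it
# when the provider (e.g. Vertex AI) can fall back to default Google Cloud credentials.
```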
@@ -1,5 +1,5 @@
  langchain_timbr/__init__.py,sha256=gxd6Y6QDmYZtPlYVdXtPIy501hMOZXHjWh2qq4qzt_s,828
- langchain_timbr/_version.py,sha256=UFd-y4dR-HVHWzUJlgmJ3T05hh9C4Q-la-D9QTIoZw0,704
+ langchain_timbr/_version.py,sha256=922NdOI3zEr3qq7FLo7Lgu6-ppZZIIPboyCzU-pnARo,704
  langchain_timbr/config.py,sha256=PEtvNgvnA9UseZJjKgup_O6xdG-VYk3N11nH8p8W1Kg,1410
  langchain_timbr/timbr_llm_connector.py,sha256=1jDicBZkW7CKB-PvQiQ1_AMXYm9JJHaoNaPqy54nhh8,13096
  langchain_timbr/langchain/__init__.py,sha256=ejcsZKP9PK0j4WrrCCcvBXpDpP-TeRiVb21OIUJqix8,580
@@ -15,14 +15,14 @@ langchain_timbr/langgraph/generate_response_node.py,sha256=BLmsDZfbhncRpO7PEfDpy
  langchain_timbr/langgraph/generate_timbr_sql_node.py,sha256=wkau-NajblSVzNIro9IyqawULvz3XaCYSEdYW95vWco,4911
  langchain_timbr/langgraph/identify_concept_node.py,sha256=aiLDFEcz_vM4zZ_ULe1SvJKmI-e4Fb2SibZQaEPz_eY,3649
  langchain_timbr/langgraph/validate_timbr_query_node.py,sha256=-2fuieCz1hv6ua-17zfonme8LQ_OoPnoOBTdGSXkJgs,4793
- langchain_timbr/llm_wrapper/llm_wrapper.py,sha256=QoTKx_VKd2ToglX53Z49e71xoJUEH6-pRBVOzjURxCo,11320
+ langchain_timbr/llm_wrapper/llm_wrapper.py,sha256=xLBTy2E6mTDny5tSSxIeeMUffos1r3a2BkBAjn3zxO4,14935
  langchain_timbr/llm_wrapper/timbr_llm_wrapper.py,sha256=sDqDOz0qu8b4WWlagjNceswMVyvEJ8yBWZq2etBh-T0,1362
  langchain_timbr/utils/general.py,sha256=MtY-ZExKJrcBzV3EQNn6G1ESKpiQB2hJCp95BrUayUo,5707
  langchain_timbr/utils/prompt_service.py,sha256=QT7kiq72rQno77z1-tvGGD7HlH_wdTQAl_1teSoKEv4,11373
  langchain_timbr/utils/temperature_supported_models.json,sha256=d3UmBUpG38zDjjB42IoGpHTUaf0pHMBRSPY99ao1a3g,1832
- langchain_timbr/utils/timbr_llm_utils.py,sha256=7-nnTa1T9XOcgIb-aJP3Pgon_gOrCMnDPiIPiAT3UCg,23016
+ langchain_timbr/utils/timbr_llm_utils.py,sha256=m-RzPePiCzTQCVMcrbD5hlzQZ-kPrnG2_4uOa5DL9AU,22959
  langchain_timbr/utils/timbr_utils.py,sha256=p21DwTGhF4iKTLDQBkeBaJDFcXt-Hpu1ij8xzQt00Ng,16958
- langchain_timbr-2.0.2.dist-info/METADATA,sha256=VtT8woei5BJOA6rvMtpJublCk7nwDiQ4j0gfCww7n90,7771
- langchain_timbr-2.0.2.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
- langchain_timbr-2.0.2.dist-info/licenses/LICENSE,sha256=0ITGFk2alkC7-e--bRGtuzDrv62USIiVyV2Crf3_L_0,1065
- langchain_timbr-2.0.2.dist-info/RECORD,,
+ langchain_timbr-2.0.4.dist-info/METADATA,sha256=r5f9N6-YWS7vx_nWOPqsgDpjr5Zy7IBw5HzBt0ONIqM,12508
+ langchain_timbr-2.0.4.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+ langchain_timbr-2.0.4.dist-info/licenses/LICENSE,sha256=0ITGFk2alkC7-e--bRGtuzDrv62USIiVyV2Crf3_L_0,1065
+ langchain_timbr-2.0.4.dist-info/RECORD,,
@@ -1,157 +0,0 @@
- Metadata-Version: 2.4
- Name: langchain-timbr
- Version: 2.0.2
- Summary: LangChain & LangGraph extensions that parse LLM prompts into Timbr semantic SQL and execute them.
- Project-URL: Homepage, https://github.com/WPSemantix/langchain-timbr
- Project-URL: Documentation, https://docs.timbr.ai/doc/docs/integration/langchain-sdk/
- Project-URL: Source, https://github.com/WPSemantix/langchain-timbr
- Project-URL: Issues, https://github.com/WPSemantix/langchain-timbr/issues
- Author-email: "Timbr.ai" <contact@timbr.ai>
- License: MIT
- License-File: LICENSE
- Keywords: Agents,Knowledge Graph,LLM,LangChain,LangGraph,SQL,Semantic Layer,Timbr
- Classifier: Intended Audience :: Developers
- Classifier: License :: OSI Approved :: MIT License
- Classifier: Programming Language :: Python :: 3
- Classifier: Programming Language :: Python :: 3 :: Only
- Classifier: Programming Language :: Python :: 3.9
- Classifier: Programming Language :: Python :: 3.10
- Classifier: Programming Language :: Python :: 3.11
- Classifier: Programming Language :: Python :: 3.12
- Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
- Requires-Python: <3.13,>=3.9
- Requires-Dist: cryptography>=44.0.3
- Requires-Dist: langchain-community>=0.3.20
- Requires-Dist: langchain-core>=0.3.58
- Requires-Dist: langchain>=0.3.25
- Requires-Dist: langgraph>=0.3.20
- Requires-Dist: pydantic==2.10.4
- Requires-Dist: pytimbr-api>=2.0.0
- Requires-Dist: tiktoken==0.8.0
- Requires-Dist: transformers>=4.53
- Provides-Extra: all
- Requires-Dist: anthropic==0.42.0; extra == 'all'
- Requires-Dist: azure-identity==1.16.1; extra == 'all'
- Requires-Dist: databricks-langchain==0.3.0; (python_version < '3.10') and extra == 'all'
- Requires-Dist: databricks-langchain==0.7.1; (python_version >= '3.10') and extra == 'all'
- Requires-Dist: databricks-sdk==0.64.0; extra == 'all'
- Requires-Dist: google-generativeai==0.8.4; extra == 'all'
- Requires-Dist: langchain-anthropic>=0.3.1; extra == 'all'
- Requires-Dist: langchain-google-genai>=2.0.9; extra == 'all'
- Requires-Dist: langchain-openai>=0.3.16; extra == 'all'
- Requires-Dist: langchain-tests>=0.3.20; extra == 'all'
- Requires-Dist: openai>=1.77.0; extra == 'all'
- Requires-Dist: pyarrow<19.0.0; extra == 'all'
- Requires-Dist: pytest==8.3.4; extra == 'all'
- Requires-Dist: snowflake-snowpark-python>=1.6.0; extra == 'all'
- Requires-Dist: snowflake>=0.8.0; extra == 'all'
- Requires-Dist: uvicorn==0.34.0; extra == 'all'
- Provides-Extra: anthropic
- Requires-Dist: anthropic==0.42.0; extra == 'anthropic'
- Requires-Dist: langchain-anthropic>=0.3.1; extra == 'anthropic'
- Provides-Extra: azure-openai
- Requires-Dist: azure-identity==1.16.1; extra == 'azure-openai'
- Requires-Dist: langchain-openai>=0.3.16; extra == 'azure-openai'
- Requires-Dist: openai>=1.77.0; extra == 'azure-openai'
- Provides-Extra: databricks
- Requires-Dist: databricks-langchain==0.3.0; (python_version < '3.10') and extra == 'databricks'
- Requires-Dist: databricks-langchain==0.7.1; (python_version >= '3.10') and extra == 'databricks'
- Requires-Dist: databricks-sdk==0.64.0; extra == 'databricks'
- Provides-Extra: dev
- Requires-Dist: langchain-tests>=0.3.20; extra == 'dev'
- Requires-Dist: pyarrow<19.0.0; extra == 'dev'
- Requires-Dist: pytest==8.3.4; extra == 'dev'
- Requires-Dist: uvicorn==0.34.0; extra == 'dev'
- Provides-Extra: google
- Requires-Dist: google-generativeai==0.8.4; extra == 'google'
- Requires-Dist: langchain-google-genai>=2.0.9; extra == 'google'
- Provides-Extra: openai
- Requires-Dist: langchain-openai>=0.3.16; extra == 'openai'
- Requires-Dist: openai>=1.77.0; extra == 'openai'
- Provides-Extra: snowflake
- Requires-Dist: snowflake-snowpark-python>=1.6.0; extra == 'snowflake'
- Requires-Dist: snowflake>=0.8.0; extra == 'snowflake'
- Description-Content-Type: text/markdown
-
- ![Timbr logo description](https://timbr.ai/wp-content/uploads/2025/01/logotimbrai230125.png)
-
- [![FOSSA Status](https://app.fossa.com/api/projects/git%2Bgithub.com%2FWPSemantix%2Flangchain-timbr.svg?type=shield&issueType=security)](https://app.fossa.com/projects/git%2Bgithub.com%2FWPSemantix%2Flangchain-timbr?ref=badge_shield&issueType=security)
- [![FOSSA Status](https://app.fossa.com/api/projects/git%2Bgithub.com%2FWPSemantix%2Flangchain-timbr.svg?type=shield&issueType=license)](https://app.fossa.com/projects/git%2Bgithub.com%2FWPSemantix%2Flangchain-timbr?ref=badge_shield&issueType=license)
-
- [![Python 3.9](https://img.shields.io/badge/python-3.9-blue)](https://www.python.org/downloads/release/python-3921/)
- [![Python 3.10](https://img.shields.io/badge/python-3.10-blue.svg)](https://www.python.org/downloads/release/python-31017/)
- [![Python 3.11](https://img.shields.io/badge/python-3.11-blue.svg)](https://www.python.org/downloads/release/python-31112/)
- [![Python 3.12](https://img.shields.io/badge/python-3.12-blue.svg)](https://www.python.org/downloads/release/python-3129/)
-
- # Timbr LangChain LLM SDK
-
- Timbr LangChain LLM SDK is a Python SDK that extends LangChain and LangGraph with custom agents, chains, and nodes for seamless integration with the Timbr semantic layer. It enables converting natural language prompts into optimized semantic-SQL queries and executing them directly against your data.
-
- ![Timbr LangGraph pipeline](https://docs.timbr.ai/doc/assets/images/timbr-langgraph-fcf8e2eb7e26dc9dfa8b56b62937281e.png)
-
-
- ## Dependencies
- - Access to a timbr-server
- - Python 3.9.13 or newer
-
- ## Installation
-
- ### Using pip
-
- ```bash
- python -m pip install langchain-timbr
- ```
-
- ### Install with selected LLM providers
- #### One of: openai, anthropic, google, azure_openai, snowflake, databricks (or 'all')
-
- ```bash
- python -m pip install 'langchain-timbr[<your selected providers, separated by comma w/o space>]'
- ```
-
- ### Using pip from github
- ```bash
- pip install git+https://github.com/WPSemantix/langchain-timbr
- ```
-
- ## Documentation
-
- For comprehensive documentation and usage examples, please visit:
-
- - [Timbr LangChain Documentation](https://docs.timbr.ai/doc/docs/integration/langchain-sdk)
- - [Timbr LangGraph Documentation](https://docs.timbr.ai/doc/docs/integration/langgraph-sdk)
-
- ## Configuration
-
- The SDK uses environment variables for configuration. All configurations are optional - when set, they serve as default values for `langchain-timbr` provided tools. Below are all available configuration options:
-
- ### Configuration Options
-
- #### Timbr Connection Settings
-
- - **`TIMBR_URL`** - The URL of your Timbr server
- - **`TIMBR_TOKEN`** - Authentication token for accessing the Timbr server
- - **`TIMBR_ONTOLOGY`** - The ontology to use (also accepts `ONTOLOGY` as an alias)
- - **`IS_JWT`** - Whether the token is a JWT token (true/false)
- - **`JWT_TENANT_ID`** - Tenant ID for JWT authentication
-
- #### Cache and Data Processing
-
- - **`CACHE_TIMEOUT`** - Timeout for caching operations in seconds
- - **`IGNORE_TAGS`** - Comma-separated list of tags to ignore during processing
- - **`IGNORE_TAGS_PREFIX`** - Comma-separated list of tag prefixes to ignore during processing
-
- #### LLM Configuration
-
- - **`LLM_TYPE`** - The type of LLM provider to use
- - **`LLM_MODEL`** - The specific model to use with the LLM provider
- - **`LLM_API_KEY`** - API key or client secret for the LLM provider
- - **`LLM_TEMPERATURE`** - Temperature setting for LLM responses (controls randomness)
- - **`LLM_ADDITIONAL_PARAMS`** - Additional parameters to pass to the LLM
- - **`LLM_TIMEOUT`** - Timeout for LLM requests in seconds
- - **`LLM_TENANT_ID`** - LLM provider tenant/directory ID (Used for Service Principal authentication)
- - **`LLM_CLIENT_ID`** - LLM provider client ID (Used for Service Principal authentication)
- - **`LLM_CLIENT_SECRET`** - LLM provider client secret (Used for Service Principal authentication)
- - **`LLM_ENDPOINT`** - LLM provider OpenAI endpoint URL
- - **`LLM_API_VERSION`** - LLM provider API version
- - **`LLM_SCOPE`** - LLM provider authentication scope