langchain-timbr 2.0.0__tar.gz → 2.0.1__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {langchain_timbr-2.0.0 → langchain_timbr-2.0.1}/PKG-INFO +11 -3
- {langchain_timbr-2.0.0 → langchain_timbr-2.0.1}/README.md +8 -2
- {langchain_timbr-2.0.0 → langchain_timbr-2.0.1}/pyproject.toml +2 -1
- {langchain_timbr-2.0.0 → langchain_timbr-2.0.1}/requirements.txt +1 -0
- {langchain_timbr-2.0.0 → langchain_timbr-2.0.1}/src/langchain_timbr/_version.py +2 -2
- {langchain_timbr-2.0.0 → langchain_timbr-2.0.1}/src/langchain_timbr/config.py +8 -0
- {langchain_timbr-2.0.0 → langchain_timbr-2.0.1}/src/langchain_timbr/llm_wrapper/llm_wrapper.py +63 -31
- {langchain_timbr-2.0.0 → langchain_timbr-2.0.1}/src/langchain_timbr/timbr_llm_connector.py +10 -18
- {langchain_timbr-2.0.0 → langchain_timbr-2.0.1}/src/langchain_timbr/utils/general.py +28 -4
- {langchain_timbr-2.0.0 → langchain_timbr-2.0.1}/tests/integration/test_azure_openai_model.py +42 -2
- {langchain_timbr-2.0.0 → langchain_timbr-2.0.1}/tests/standard/test_llm_wrapper_optional_params.py +14 -14
- {langchain_timbr-2.0.0 → langchain_timbr-2.0.1}/tests/standard/test_optional_llm_integration.py +16 -16
- {langchain_timbr-2.0.0 → langchain_timbr-2.0.1}/.github/dependabot.yml +0 -0
- {langchain_timbr-2.0.0 → langchain_timbr-2.0.1}/.github/pull_request_template.md +0 -0
- {langchain_timbr-2.0.0 → langchain_timbr-2.0.1}/.github/workflows/_codespell.yml +0 -0
- {langchain_timbr-2.0.0 → langchain_timbr-2.0.1}/.github/workflows/_fossa.yml +0 -0
- {langchain_timbr-2.0.0 → langchain_timbr-2.0.1}/.github/workflows/install-dependencies-and-run-tests.yml +0 -0
- {langchain_timbr-2.0.0 → langchain_timbr-2.0.1}/.github/workflows/publish.yml +0 -0
- {langchain_timbr-2.0.0 → langchain_timbr-2.0.1}/.gitignore +0 -0
- {langchain_timbr-2.0.0 → langchain_timbr-2.0.1}/LICENSE +0 -0
- {langchain_timbr-2.0.0 → langchain_timbr-2.0.1}/pytest.ini +0 -0
- {langchain_timbr-2.0.0 → langchain_timbr-2.0.1}/src/langchain_timbr/__init__.py +0 -0
- {langchain_timbr-2.0.0 → langchain_timbr-2.0.1}/src/langchain_timbr/langchain/__init__.py +0 -0
- {langchain_timbr-2.0.0 → langchain_timbr-2.0.1}/src/langchain_timbr/langchain/execute_timbr_query_chain.py +0 -0
- {langchain_timbr-2.0.0 → langchain_timbr-2.0.1}/src/langchain_timbr/langchain/generate_answer_chain.py +0 -0
- {langchain_timbr-2.0.0 → langchain_timbr-2.0.1}/src/langchain_timbr/langchain/generate_timbr_sql_chain.py +0 -0
- {langchain_timbr-2.0.0 → langchain_timbr-2.0.1}/src/langchain_timbr/langchain/identify_concept_chain.py +0 -0
- {langchain_timbr-2.0.0 → langchain_timbr-2.0.1}/src/langchain_timbr/langchain/timbr_sql_agent.py +0 -0
- {langchain_timbr-2.0.0 → langchain_timbr-2.0.1}/src/langchain_timbr/langchain/validate_timbr_sql_chain.py +0 -0
- {langchain_timbr-2.0.0 → langchain_timbr-2.0.1}/src/langchain_timbr/langgraph/__init__.py +0 -0
- {langchain_timbr-2.0.0 → langchain_timbr-2.0.1}/src/langchain_timbr/langgraph/execute_timbr_query_node.py +0 -0
- {langchain_timbr-2.0.0 → langchain_timbr-2.0.1}/src/langchain_timbr/langgraph/generate_response_node.py +0 -0
- {langchain_timbr-2.0.0 → langchain_timbr-2.0.1}/src/langchain_timbr/langgraph/generate_timbr_sql_node.py +0 -0
- {langchain_timbr-2.0.0 → langchain_timbr-2.0.1}/src/langchain_timbr/langgraph/identify_concept_node.py +0 -0
- {langchain_timbr-2.0.0 → langchain_timbr-2.0.1}/src/langchain_timbr/langgraph/validate_timbr_query_node.py +0 -0
- {langchain_timbr-2.0.0 → langchain_timbr-2.0.1}/src/langchain_timbr/llm_wrapper/timbr_llm_wrapper.py +0 -0
- {langchain_timbr-2.0.0 → langchain_timbr-2.0.1}/src/langchain_timbr/utils/prompt_service.py +0 -0
- {langchain_timbr-2.0.0 → langchain_timbr-2.0.1}/src/langchain_timbr/utils/temperature_supported_models.json +0 -0
- {langchain_timbr-2.0.0 → langchain_timbr-2.0.1}/src/langchain_timbr/utils/timbr_llm_utils.py +0 -0
- {langchain_timbr-2.0.0 → langchain_timbr-2.0.1}/src/langchain_timbr/utils/timbr_utils.py +0 -0
- {langchain_timbr-2.0.0 → langchain_timbr-2.0.1}/tests/README.md +0 -0
- {langchain_timbr-2.0.0 → langchain_timbr-2.0.1}/tests/conftest.py +0 -0
- {langchain_timbr-2.0.0 → langchain_timbr-2.0.1}/tests/integration/test_agent_integration.py +0 -0
- {langchain_timbr-2.0.0 → langchain_timbr-2.0.1}/tests/integration/test_azure_databricks_provider.py +0 -0
- {langchain_timbr-2.0.0 → langchain_timbr-2.0.1}/tests/integration/test_chain_pipeline.py +0 -0
- {langchain_timbr-2.0.0 → langchain_timbr-2.0.1}/tests/integration/test_jwt_token.py +0 -0
- {langchain_timbr-2.0.0 → langchain_timbr-2.0.1}/tests/integration/test_langchain_chains.py +0 -0
- {langchain_timbr-2.0.0 → langchain_timbr-2.0.1}/tests/integration/test_langgraph_nodes.py +0 -0
- {langchain_timbr-2.0.0 → langchain_timbr-2.0.1}/tests/integration/test_timeout_functionality.py +0 -0
- {langchain_timbr-2.0.0 → langchain_timbr-2.0.1}/tests/standard/conftest.py +0 -0
- {langchain_timbr-2.0.0 → langchain_timbr-2.0.1}/tests/standard/test_chain_documentation.py +0 -0
- {langchain_timbr-2.0.0 → langchain_timbr-2.0.1}/tests/standard/test_connection_validation.py +0 -0
- {langchain_timbr-2.0.0 → langchain_timbr-2.0.1}/tests/standard/test_standard_chain_requirements.py +0 -0
- {langchain_timbr-2.0.0 → langchain_timbr-2.0.1}/tests/standard/test_unit_tests.py +0 -0
{langchain_timbr-2.0.0 → langchain_timbr-2.0.1}/PKG-INFO
RENAMED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: langchain-timbr
-Version: 2.0.0
+Version: 2.0.1
 Summary: LangChain & LangGraph extensions that parse LLM prompts into Timbr semantic SQL and execute them.
 Project-URL: Homepage, https://github.com/WPSemantix/langchain-timbr
 Project-URL: Documentation, https://docs.timbr.ai/doc/docs/integration/langchain-sdk/

@@ -31,6 +31,7 @@ Requires-Dist: tiktoken==0.8.0
 Requires-Dist: transformers>=4.53
 Provides-Extra: all
 Requires-Dist: anthropic==0.42.0; extra == 'all'
+Requires-Dist: azure-identity==1.16.1; extra == 'all'
 Requires-Dist: databricks-langchain==0.3.0; (python_version < '3.10') and extra == 'all'
 Requires-Dist: databricks-langchain==0.7.1; (python_version >= '3.10') and extra == 'all'
 Requires-Dist: databricks-sdk==0.64.0; extra == 'all'

@@ -49,6 +50,7 @@ Provides-Extra: anthropic
 Requires-Dist: anthropic==0.42.0; extra == 'anthropic'
 Requires-Dist: langchain-anthropic>=0.3.1; extra == 'anthropic'
 Provides-Extra: azure-openai
+Requires-Dist: azure-identity==1.16.1; extra == 'azure-openai'
 Requires-Dist: langchain-openai>=0.3.16; extra == 'azure-openai'
 Requires-Dist: openai>=1.77.0; extra == 'azure-openai'
 Provides-Extra: databricks

@@ -104,7 +106,7 @@ python -m pip install langchain-timbr
 #### One of: openai, anthropic, google, azure_openai, snowflake, databricks (or 'all')

 ```bash
-python -m pip install 'langchain-timbr[<your selected providers, separated by comma w/o space]'
+python -m pip install 'langchain-timbr[<your selected providers, separated by comma w/o space>]'
 ```

 ### Using pip from github

@@ -143,7 +145,13 @@ The SDK uses environment variables for configuration. All configurations are opt

 - **`LLM_TYPE`** - The type of LLM provider to use
 - **`LLM_MODEL`** - The specific model to use with the LLM provider
-- **`LLM_API_KEY`** - API key for the LLM provider
+- **`LLM_API_KEY`** - API key or client secret for the LLM provider
 - **`LLM_TEMPERATURE`** - Temperature setting for LLM responses (controls randomness)
 - **`LLM_ADDITIONAL_PARAMS`** - Additional parameters to pass to the LLM
 - **`LLM_TIMEOUT`** - Timeout for LLM requests in seconds
+- **`LLM_TENANT_ID`** - LLM provider tenant/directory ID (Used for Service Principal authentication)
+- **`LLM_CLIENT_ID`** - LLM provider client ID (Used for Service Principal authentication)
+- **`LLM_CLIENT_SECRET`** - LLM provider client secret (Used for Service Principal authentication)
+- **`LLM_ENDPOINT`** - LLM provider OpenAI endpoint URL
+- **`LLM_API_VERSION`** - LLM provider API version
+- **`LLM_SCOPE`** - LLM provider authentication scope
{langchain_timbr-2.0.0 → langchain_timbr-2.0.1}/README.md
RENAMED

@@ -31,7 +31,7 @@ python -m pip install langchain-timbr
 #### One of: openai, anthropic, google, azure_openai, snowflake, databricks (or 'all')

 ```bash
-python -m pip install 'langchain-timbr[<your selected providers, separated by comma w/o space]'
+python -m pip install 'langchain-timbr[<your selected providers, separated by comma w/o space>]'
 ```

 ### Using pip from github

@@ -70,7 +70,13 @@ The SDK uses environment variables for configuration. All configurations are opt

 - **`LLM_TYPE`** - The type of LLM provider to use
 - **`LLM_MODEL`** - The specific model to use with the LLM provider
-- **`LLM_API_KEY`** - API key for the LLM provider
+- **`LLM_API_KEY`** - API key or client secret for the LLM provider
 - **`LLM_TEMPERATURE`** - Temperature setting for LLM responses (controls randomness)
 - **`LLM_ADDITIONAL_PARAMS`** - Additional parameters to pass to the LLM
 - **`LLM_TIMEOUT`** - Timeout for LLM requests in seconds
+- **`LLM_TENANT_ID`** - LLM provider tenant/directory ID (Used for Service Principal authentication)
+- **`LLM_CLIENT_ID`** - LLM provider client ID (Used for Service Principal authentication)
+- **`LLM_CLIENT_SECRET`** - LLM provider client secret (Used for Service Principal authentication)
+- **`LLM_ENDPOINT`** - LLM provider OpenAI endpoint URL
+- **`LLM_API_VERSION`** - LLM provider API version
+- **`LLM_SCOPE`** - LLM provider authentication scope
{langchain_timbr-2.0.0 → langchain_timbr-2.0.1}/pyproject.toml
RENAMED

@@ -38,7 +38,7 @@ dependencies = [
 [project.optional-dependencies]
 # LLM providers
 openai = ["openai>=1.77.0", "langchain-openai>=0.3.16"]
-azure_openai = ["openai>=1.77.0", "langchain-openai>=0.3.16"]
+azure_openai = ["azure-identity==1.16.1", "openai>=1.77.0", "langchain-openai>=0.3.16"]
 anthropic = ["anthropic==0.42.0", "langchain-anthropic>=0.3.1"]
 google = ["langchain-google-genai>=2.0.9", "google-generativeai==0.8.4"]
 snowflake = ["snowflake>=0.8.0", "snowflake-snowpark-python>=1.6.0"]

@@ -59,6 +59,7 @@ dev = [
 # All optional dependencies
 all = [
     "anthropic==0.42.0",
+    "azure-identity==1.16.1",
     "google-generativeai==0.8.4",
     "langchain-anthropic>=0.3.1",
     "openai>=1.77.0",
{langchain_timbr-2.0.0 → langchain_timbr-2.0.1}/src/langchain_timbr/_version.py
RENAMED

@@ -28,7 +28,7 @@ version_tuple: VERSION_TUPLE
 commit_id: COMMIT_ID
 __commit_id__: COMMIT_ID

-__version__ = version = '2.0.0'
-__version_tuple__ = version_tuple = (2, 0, 0)
+__version__ = version = '2.0.1'
+__version_tuple__ = version_tuple = (2, 0, 1)

 __commit_id__ = commit_id = None
{langchain_timbr-2.0.0 → langchain_timbr-2.0.1}/src/langchain_timbr/config.py
RENAMED

@@ -20,3 +20,11 @@ llm_api_key = os.environ.get('LLM_API_KEY')
 llm_temperature = os.environ.get('LLM_TEMPERATURE', 0.0)
 llm_additional_params = os.environ.get('LLM_ADDITIONAL_PARAMS', '')
 llm_timeout = to_integer(os.environ.get('LLM_TIMEOUT', 60)) # Default 60 seconds timeout
+
+# Optional for Azure OpenAI with Service Principal authentication
+llm_tenant_id = os.environ.get('LLM_TENANT_ID', None)
+llm_client_id = os.environ.get('LLM_CLIENT_ID', None)
+llm_client_secret = os.environ.get('LLM_CLIENT_SECRET', None)
+llm_endpoint = os.environ.get('LLM_ENDPOINT', None)
+llm_api_version = os.environ.get('LLM_API_VERSION', None)
+llm_scope = os.environ.get('LLM_SCOPE', "https://cognitiveservices.azure.com/.default") # e.g. "api://<your-client-id>/.default"
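
For context, a minimal sketch of how these new settings could be exercised end to end. The variable names come from the diff above; the values, the `LLM_TYPE` identifier, and the zero-argument `LlmWrapper()` call pattern (which the tests further down also rely on) are illustrative:

```python
import os

# Illustrative placeholders only -- not real credentials.
os.environ["LLM_TYPE"] = "azure_openai"  # assumed identifier; see the LlmTypes enum for exact values
os.environ["LLM_MODEL"] = "gpt-4o"
os.environ["LLM_TENANT_ID"] = "<tenant-guid>"
os.environ["LLM_CLIENT_ID"] = "<app-client-id>"
os.environ["LLM_CLIENT_SECRET"] = "<client-secret>"
os.environ["LLM_ENDPOINT"] = "https://<resource>.openai.azure.com/"
os.environ["LLM_API_VERSION"] = "2024-12-01-preview"
# LLM_SCOPE is optional; config.py above defaults it to
# "https://cognitiveservices.azure.com/.default".

# config.py reads os.environ at import time, so set the environment first.
from langchain_timbr import LlmWrapper

# With tenant and client IDs present, the wrapper takes the Service
# Principal path added in llm_wrapper.py (next section) instead of
# plain API-key authentication.
llm = LlmWrapper()
```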
{langchain_timbr-2.0.0 → langchain_timbr-2.0.1}/src/langchain_timbr/llm_wrapper/llm_wrapper.py
RENAMED

@@ -4,8 +4,8 @@ from langchain.llms.base import LLM
 from pydantic import Field

 from .timbr_llm_wrapper import TimbrLlmWrapper
-from ..utils.general import is_llm_type, is_support_temperature, get_supported_models, parse_additional_params
-from ..
+from ..utils.general import is_llm_type, is_support_temperature, get_supported_models, parse_additional_params, pop_param_value
+from .. import config

 class LlmTypes(Enum):
     OpenAI = 'openai-chat'

@@ -42,13 +42,13 @@ class LlmWrapper(LLM):
         """
         super().__init__()

-        selected_llm_type = llm_type or
-        selected_api_key = api_key or
-        selected_model = model or
+        selected_llm_type = llm_type or config.llm_type
+        selected_api_key = api_key or config.llm_api_key or config.llm_client_secret
+        selected_model = model or config.llm_model
         selected_additional_params = llm_params.pop('additional_params', None)

         # Parse additional parameters from init params or config and merge with provided params
-        default_additional_params = parse_additional_params(selected_additional_params or
+        default_additional_params = parse_additional_params(selected_additional_params or config.llm_additional_params or {})
         additional_llm_params = {**default_additional_params, **llm_params}

         # Validation: Ensure we have the required parameters

@@ -76,8 +76,8 @@ class LlmWrapper(LLM):
         Add temperature to the LLM parameters if the LLM model supports it.
         """
         if "temperature" not in llm_params:
-            if llm_temperature is not None and is_support_temperature(llm_type, llm_model):
-                llm_params["temperature"] = llm_temperature
+            if config.llm_temperature is not None and is_support_temperature(llm_type, llm_model):
+                llm_params["temperature"] = config.llm_temperature
         return llm_params


@@ -127,15 +127,36 @@ class LlmWrapper(LLM):
             )
         elif is_llm_type(llm_type, LlmTypes.AzureOpenAI):
             from langchain_openai import AzureChatOpenAI
-            azure_endpoint = params.pop('azure_endpoint', None)
-            azure_api_version = params.pop('azure_openai_api_version', None)
             llm_model = model or "gpt-4o-2024-11-20"
             params = self._add_temperature(LlmTypes.AzureOpenAI.name, llm_model, **llm_params)
+
+            azure_endpoint = pop_param_value(params, ['azure_endpoint', 'llm_endpoint'], default=config.llm_endpoint)
+            azure_api_version = pop_param_value(params, ['azure_api_version', 'llm_api_version'], default=config.llm_api_version)
+
+            azure_client_id = pop_param_value(params, ['azure_client_id', 'llm_client_id'], default=config.llm_client_id)
+            azure_tenant_id = pop_param_value(params, ['azure_tenant_id', 'llm_tenant_id'], default=config.llm_tenant_id)
+            if azure_tenant_id and azure_client_id:
+                from azure.identity import ClientSecretCredential, get_bearer_token_provider
+                azure_client_secret = pop_param_value(params, ['azure_client_secret', 'llm_client_secret'], default=api_key)
+                scope = pop_param_value(params, ['azure_scope', 'llm_scope'], default=config.llm_scope)
+                credential = ClientSecretCredential(
+                    tenant_id=azure_tenant_id,
+                    client_id=azure_client_id,
+                    client_secret=azure_client_secret
+                )
+                token_provider = get_bearer_token_provider(credential, scope)
+                params['azure_ad_token_provider'] = token_provider
+            else:
+                params['open_api_key'] = api_key
+
+            if 'openai_api_version' not in params or not params['openai_api_version']:
+                params['openai_api_version'] = azure_api_version
+
+            if 'azure_endpoint' not in params or not params['azure_endpoint']:
+                params['azure_endpoint'] = azure_endpoint
+
             return AzureChatOpenAI(
-                openai_api_key=api_key,
                 azure_deployment=llm_model,
-                azure_endpoint=azure_endpoint,
-                openai_api_version=azure_api_version,
                 **params,
             )
         elif is_llm_type(llm_type, LlmTypes.Databricks):

@@ -174,16 +195,28 @@ class LlmWrapper(LLM):
         elif is_llm_type(self._llm_type, LlmTypes.AzureOpenAI):
             from openai import AzureOpenAI
             # Get Azure-specific attributes from the client
-
-
-
-
-
-
-
-
-
-
+            api_key = None
+            azure_ad_token_provider = None
+            azure_endpoint = getattr(self.client, 'azure_endpoint', config.llm_endpoint)
+            api_version = getattr(self.client, 'openai_api_version', config.llm_api_version)
+
+            params = {
+                "azure_endpoint": azure_endpoint,
+                "api_version": api_version,
+            }
+
+            api_key = getattr(self.client.openai_api_key, '_secret_value', None)
+            if api_key:
+                params['api_key'] = api_key
+            else:
+                azure_ad_token_provider = getattr(self.client, 'azure_ad_token_provider', None)
+                if azure_ad_token_provider:
+                    params['azure_ad_token_provider'] = azure_ad_token_provider
+                else:
+                    raise ValueError("Azure OpenAI requires either an API key or an Azure AD token provider for authentication.")
+
+            if azure_endpoint and api_version and (api_key or azure_ad_token_provider):
+                client = AzureOpenAI(**params)
             # For Azure, get the deployments instead of models
             try:
                 models = [model.id for model in client.models.list()]

@@ -199,9 +232,10 @@ class LlmWrapper(LLM):
                 "llama3.1-405b"
             ]
         elif is_llm_type(self._llm_type, LlmTypes.Databricks):
-            w = self.client
-
-
+            w = getattr(self.client, 'workspace_client', None)
+            if w:
+                models = [ep.name for ep in w.serving_endpoints.list()]
+
         # elif self._is_llm_type(self._llm_type, LlmTypes.Timbr):

         except Exception:

@@ -213,11 +247,9 @@ class LlmWrapper(LLM):
             if is_llm_type(self._llm_type, llm_enum):
                 llm_type_name = llm_enum.name
                 break
-
-
-
-        else:
-            models = []
+
+        if len(models) == 0 and llm_type_name:
+            models = get_supported_models(llm_type_name)

         return sorted(models)
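
The heart of the new branch is the azure-identity bearer-token-provider pattern. Below is a standalone sketch of that same pattern, assuming placeholder credentials, endpoint, and deployment name; it mirrors the wrapper code above rather than reproducing it:

```python
from azure.identity import ClientSecretCredential, get_bearer_token_provider
from langchain_openai import AzureChatOpenAI

# Placeholder Service Principal credentials, for illustration only.
credential = ClientSecretCredential(
    tenant_id="<tenant-guid>",
    client_id="<app-client-id>",
    client_secret="<client-secret>",
)

# Returns a zero-argument callable that fetches (and refreshes) an
# Entra ID bearer token for the given scope whenever it is invoked.
token_provider = get_bearer_token_provider(
    credential, "https://cognitiveservices.azure.com/.default"
)

# AzureChatOpenAI accepts the callable in place of a static api_key,
# so expiring tokens are renewed transparently between calls.
llm = AzureChatOpenAI(
    azure_deployment="gpt-4o",
    azure_endpoint="https://<resource>.openai.azure.com/",
    openai_api_version="2024-12-01-preview",
    azure_ad_token_provider=token_provider,
)
```

Note that the wrapper keeps key-based authentication as the fallback: without a tenant/client ID pair it falls through to the `else` branch and passes the API key instead.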
{langchain_timbr-2.0.0 → langchain_timbr-2.0.1}/src/langchain_timbr/timbr_llm_connector.py
RENAMED

@@ -10,15 +10,7 @@ from .langchain import IdentifyTimbrConceptChain, GenerateTimbrSqlChain, Validat
 from .langgraph import GenerateTimbrSqlNode, ValidateSemanticSqlNode, ExecuteSemanticQueryNode, GenerateResponseNode


-from .config import (
-    url as default_url,
-    token as default_token,
-    ontology as default_ontology,
-    llm_type,
-    llm_model,
-    llm_api_key,
-    llm_temperature,
-)
+from . import config

 class TimbrLanggraphState(TypedDict):
     prompt: str

@@ -35,9 +27,9 @@ class TimbrLlmConnector:
     def __init__(
         self,
         llm: LLM,
-        url: Optional[str] = default_url,
-        token: Optional[str] = default_token,
-        ontology: Optional[str] = default_ontology,
+        url: Optional[str] = config.url,
+        token: Optional[str] = config.token,
+        ontology: Optional[str] = config.ontology,
         max_limit: Optional[int] = 500,
         verify_ssl: Optional[bool] = True,
         is_jwt: Optional[bool] = False,

@@ -97,15 +89,15 @@ class TimbrLlmConnector:

         if llm is not None:
             self._llm = llm
-        elif llm_type is not None and llm_api_key is not None:
+        elif config.llm_type is not None and config.llm_api_key is not None:
             llm_params = {}
-            if llm_temperature is not None:
-                llm_params["temperature"] = llm_temperature
+            if config.llm_temperature is not None:
+                llm_params["temperature"] = config.llm_temperature

             self._llm = LlmWrapper(
-                llm_type=llm_type,
-                api_key=llm_api_key,
-                model=llm_model,
+                llm_type=config.llm_type,
+                api_key=config.llm_api_key,
+                model=config.llm_model,
                 **llm_params,
             )
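
With the connector now reading its defaults from the shared `config` module, an explicit LLM becomes optional whenever the `LLM_*` environment variables are set. A hedged sketch; the module path and the Timbr URL/token/ontology values are assumptions:

```python
from langchain_timbr.timbr_llm_connector import TimbrLlmConnector  # assumed module path per the diff

# llm=None triggers the fallback shown above: the connector builds an
# LlmWrapper from config.llm_type / config.llm_api_key / config.llm_model.
connector = TimbrLlmConnector(
    llm=None,
    url="https://your-timbr-host/",  # placeholder; config.url is the default
    token="tk_...",                  # placeholder; config.token is the default
    ontology="timbr_crunchbase",     # placeholder; config.ontology is the default
)
```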
{langchain_timbr-2.0.0 → langchain_timbr-2.0.1}/src/langchain_timbr/utils/general.py
RENAMED

@@ -1,5 +1,5 @@
 import os
-from typing import Optional
+from typing import Any, Optional, Union
 import json

 ### A global helper functions to use across the project

@@ -43,7 +43,7 @@ def parse_additional_params(value) -> dict:
     """
     try:
         if isinstance(value, dict):
-            return value
+            return {k.lower(): v for k, v in value.items()}
         elif isinstance(value, str) and value.strip():
             # Try to parse as JSON first
             stripped_value = value.strip()

@@ -58,10 +58,10 @@ def parse_additional_params(value) -> dict:
             for pair in (value.split('&') if '&' in value else value.split(',')):
                 if '=' in pair:
                     key, val = pair.split('=', 1)
-                    params[key.strip()] = val.strip()
+                    params[key.strip().lower()] = val.strip()
                 elif ':' in pair:
                     key, val = pair.split(':', 1)
-                    params[key.strip()] = val.strip()
+                    params[key.strip().lower()] = val.strip()
             return params
         return {}
     except Exception as e:

@@ -138,3 +138,27 @@ def get_supported_models(llm_type: str) -> list[str]:
     except (FileNotFoundError, json.JSONDecodeError):
         return []

+
+def pop_param_value(
+    params_dict: dict,
+    opt_keys: Union[str, list[str]],
+    default: Any=None,
+):
+    """
+    Retrieve the value for the first matching key from params_dict.
+
+    Args:
+        params_dict (dict): Dictionary to search for keys
+        opt_keys (str or list[str]): Key or list of keys to look for
+        default: Default value to return if no keys are found
+
+    Returns:
+        The value corresponding to the first found key, or default if none found.
+    """
+    if isinstance(opt_keys, str):
+        opt_keys = [opt_keys]
+
+    for key in opt_keys:
+        if key in params_dict:
+            return params_dict.pop(key)
+    return default
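
A quick usage sketch for the two helpers, matching the definitions above (the import path follows the file's location in the diff; values are illustrative):

```python
from langchain_timbr.utils.general import parse_additional_params, pop_param_value

# parse_additional_params now lowercases keys in every branch, so callers
# can rely on lowercase lookups regardless of the input spelling.
assert parse_additional_params({"Top_P": 0.9}) == {"top_p": 0.9}
assert parse_additional_params("Top_P=0.9&Seed=42") == {"top_p": "0.9", "seed": "42"}

# pop_param_value removes and returns the first matching key, which is how
# llm_wrapper.py accepts both the 'azure_*' and 'llm_*' parameter spellings.
params = {"llm_endpoint": "https://example.openai.azure.com/"}
endpoint = pop_param_value(params, ["azure_endpoint", "llm_endpoint"], default=None)
assert endpoint == "https://example.openai.azure.com/"
assert params == {}  # the matched key was popped from the dict
```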
{langchain_timbr-2.0.0 → langchain_timbr-2.0.1}/tests/integration/test_azure_openai_model.py
RENAMED

@@ -1,5 +1,5 @@
 from langchain_openai import AzureChatOpenAI
-from langchain_timbr import ExecuteTimbrQueryChain
+from langchain_timbr import LlmWrapper, LlmTypes, ExecuteTimbrQueryChain


 class TestAzureOpenAIModel:

@@ -98,4 +98,44 @@ class TestAzureOpenAIModel:
         assert isinstance(result["rows"], list), "'rows' should be a list"
         assert result["sql"], "SQL should be present in the result"

-
+    def skip_test_azure_service_principal_with_client_and_secret(self, llm, config):
+        """Test Azure OpenAI model integration using service principal with client ID and secret."""
+        ontology = "timbr_crunchbase"
+
+        AZURE_LLM_ENDPOINT = "<Azure OpenAI endpoint>"
+        AZURE_API_VERSION = "2024-12-01-preview"
+        AZURE_CLIENT_ID = "..."
+        AZURE_TENANT_ID = "..."
+        AZURE_CLIENT_SECRET = "..."
+        LLM_MODEL = "gpt-4o"
+
+        llm_instance = LlmWrapper(
+            llm_type=LlmTypes.AzureOpenAI,
+            api_key=AZURE_CLIENT_SECRET,
+            model=LLM_MODEL,
+            azure_endpoint=AZURE_LLM_ENDPOINT,
+            azure_api_version=AZURE_API_VERSION,
+            azure_client_id=AZURE_CLIENT_ID,
+            azure_tenant_id=AZURE_TENANT_ID,
+        )
+
+        # models = llm_instance.get_model_list()
+        # print("Available models:", models)
+
+        chain = ExecuteTimbrQueryChain(
+            llm=llm_instance,
+            url=config['timbr_url'],
+            token=config['timbr_token'],
+            ontology=ontology,
+            views_list=["org1", "person"],
+        )
+
+        inputs = {
+            "prompt": "who is Mark Zuckerberg",
+        }
+        result = chain.invoke(inputs)
+
+        print("ExecuteTimbrQueryChain result:", result)
+        assert "rows" in result, "Result should contain 'rows'"
+        assert isinstance(result["rows"], list), "'rows' should be a list"
+        assert result["sql"], "SQL should be present in the result"
{langchain_timbr-2.0.0 → langchain_timbr-2.0.1}/tests/standard/test_llm_wrapper_optional_params.py
RENAMED

@@ -27,9 +27,9 @@ class TestLlmWrapperOptionalParams:
     def test_with_config_fallback(self):
         """Test that config fallback works"""
         # Mock the config values directly
-        with patch('langchain_timbr.llm_wrapper.llm_wrapper.llm_type', 'openai-chat'),\
-             patch('langchain_timbr.llm_wrapper.llm_wrapper.llm_api_key', 'test-key-from-config'),\
-             patch('langchain_timbr.llm_wrapper.llm_wrapper.llm_model', 'gpt-4-from-config'):
+        with patch('langchain_timbr.llm_wrapper.llm_wrapper.config.llm_type', 'openai-chat'),\
+             patch('langchain_timbr.llm_wrapper.llm_wrapper.config.llm_api_key', 'test-key-from-config'),\
+             patch('langchain_timbr.llm_wrapper.llm_wrapper.config.llm_model', 'gpt-4-from-config'):
             try:
                 wrapper = LlmWrapper() # No parameters provided
                 assert wrapper is not None

@@ -42,8 +42,8 @@ class TestLlmWrapperOptionalParams:
         """Test that missing llm_type raises appropriate error"""
         with patch.dict(os.environ, {}, clear=True):
             # Mock the config values to ensure they're None
-            with patch('langchain_timbr.llm_wrapper.llm_wrapper.llm_type', None), \
-                 patch('langchain_timbr.llm_wrapper.llm_wrapper.llm_api_key', 'test-key'):
+            with patch('langchain_timbr.llm_wrapper.llm_wrapper.config.llm_type', None), \
+                 patch('langchain_timbr.llm_wrapper.llm_wrapper.config.llm_api_key', 'test-key'):
                 with pytest.raises(ValueError, match="llm_type must be provided"):
                     LlmWrapper(api_key="test-key")

@@ -51,19 +51,19 @@ class TestLlmWrapperOptionalParams:
         """Test that missing api_key raises appropriate error"""
         with patch.dict(os.environ, {}, clear=True):
             # Mock the config values to ensure they're None
-            with patch('langchain_timbr.llm_wrapper.llm_wrapper.llm_type', 'openai-chat'), \
-                 patch('langchain_timbr.llm_wrapper.llm_wrapper.llm_api_key', None):
+            with patch('langchain_timbr.llm_wrapper.llm_wrapper.config.llm_type', 'openai-chat'), \
+                 patch('langchain_timbr.llm_wrapper.llm_wrapper.config.llm_api_key', None):
                 with pytest.raises(ValueError, match="api_key must be provided"):
                     LlmWrapper(llm_type="openai-chat")

     def test_additional_params_from_config(self):
         """Test that additional parameters can be loaded from config"""
         # Mock the config values directly
-        with patch('langchain_timbr.llm_wrapper.llm_wrapper.llm_type', 'openai-chat'),\
-             patch('langchain_timbr.llm_wrapper.llm_wrapper.llm_api_key', 'test-key'),\
-             patch('langchain_timbr.llm_wrapper.llm_wrapper.llm_model', 'gpt-4'),\
-             patch('langchain_timbr.llm_wrapper.llm_wrapper.llm_temperature', 0.8),\
-             patch('langchain_timbr.llm_wrapper.llm_wrapper.llm_additional_params', '{"top_p": 0.9, "presence_penalty": 0.1}'):
+        with patch('langchain_timbr.llm_wrapper.llm_wrapper.config.llm_type', 'openai-chat'),\
+             patch('langchain_timbr.llm_wrapper.llm_wrapper.config.llm_api_key', 'test-key'),\
+             patch('langchain_timbr.llm_wrapper.llm_wrapper.config.llm_model', 'gpt-4'),\
+             patch('langchain_timbr.llm_wrapper.llm_wrapper.config.llm_temperature', 0.8),\
+             patch('langchain_timbr.llm_wrapper.llm_wrapper.config.llm_additional_params', '{"top_p": 0.9, "presence_penalty": 0.1}'):
             try:
                 wrapper = LlmWrapper() # No parameters provided
                 assert wrapper is not None

@@ -99,7 +99,7 @@ class TestLlmWrapperOptionalParams:
         """Test that missing both llm_type and api_key raises appropriate error"""
         with patch.dict(os.environ, {}, clear=True):
             # Mock the config values to ensure they're None
-            with patch('langchain_timbr.llm_wrapper.llm_wrapper.llm_type', None), \
-                 patch('langchain_timbr.llm_wrapper.llm_wrapper.llm_api_key', None):
+            with patch('langchain_timbr.llm_wrapper.llm_wrapper.config.llm_type', None), \
+                 patch('langchain_timbr.llm_wrapper.llm_wrapper.config.llm_api_key', None):
                 with pytest.raises(ValueError, match="llm_type must be provided"):
                     LlmWrapper()
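
These patch targets work precisely because of the import change in llm_wrapper.py: `from .. import config` makes every read an attribute lookup on the shared module, so patching `config.llm_type` is visible at call time, whereas the old `from ..config import ...` style bound the names once at import. A self-contained illustration of the difference, using a stand-in module rather than the real package:

```python
import types
from unittest.mock import patch

# Stand-in for langchain_timbr.config (hypothetical, for illustration).
config = types.ModuleType("demo_config")
config.llm_type = None

def read_llm_type():
    # The attribute is looked up at call time, so an active patch is
    # observed. With 'from demo_config import llm_type' the name would
    # have been frozen at import and the patch below would be invisible.
    return config.llm_type

with patch.object(config, "llm_type", "openai-chat"):
    assert read_llm_type() == "openai-chat"

assert read_llm_type() is None  # patch reverted on context exit
```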
{langchain_timbr-2.0.0 → langchain_timbr-2.0.1}/tests/standard/test_optional_llm_integration.py
RENAMED

@@ -20,9 +20,9 @@ class TestOptionalLLMIntegration:
     def test_chains_with_env_variables(self):
         """Test that all chains can be initialized without LLM when config defaults are available"""
         # Mock the config values directly instead of environment variables
-        with patch('langchain_timbr.llm_wrapper.llm_wrapper.llm_type', 'openai-chat'),\
-             patch('langchain_timbr.llm_wrapper.llm_wrapper.llm_api_key', 'test-key'),\
-             patch('langchain_timbr.llm_wrapper.llm_wrapper.llm_model', 'gpt-4'),\
+        with patch('langchain_timbr.llm_wrapper.llm_wrapper.config.llm_type', 'openai-chat'),\
+             patch('langchain_timbr.llm_wrapper.llm_wrapper.config.llm_api_key', 'test-key'),\
+             patch('langchain_timbr.llm_wrapper.llm_wrapper.config.llm_model', 'gpt-4'),\
             patch.dict(os.environ, {
                 'TIMBR_URL': 'http://test-timbr.com',
                 'TIMBR_TOKEN': 'test-token',

@@ -52,9 +52,9 @@ class TestOptionalLLMIntegration:
     def test_agent_with_env_variables(self):
         """Test that TimbrSqlAgent can be initialized without LLM when config defaults are available"""
         # Mock the config values directly instead of environment variables
-        with patch('langchain_timbr.llm_wrapper.llm_wrapper.llm_type', 'openai-chat'),\
-             patch('langchain_timbr.llm_wrapper.llm_wrapper.llm_api_key', 'test-key'),\
-             patch('langchain_timbr.llm_wrapper.llm_wrapper.llm_model', 'gpt-4'),\
+        with patch('langchain_timbr.llm_wrapper.llm_wrapper.config.llm_type', 'openai-chat'),\
+             patch('langchain_timbr.llm_wrapper.llm_wrapper.config.llm_api_key', 'test-key'),\
+             patch('langchain_timbr.llm_wrapper.llm_wrapper.config.llm_model', 'gpt-4'),\
             patch.dict(os.environ, {
                 'TIMBR_URL': 'http://test-timbr.com',
                 'TIMBR_TOKEN': 'test-token',

@@ -74,9 +74,9 @@ class TestOptionalLLMIntegration:
     def test_create_agent_function_with_env_variables(self):
         """Test that create_timbr_sql_agent can be called without LLM when config defaults are available"""
         # Mock the config values directly instead of environment variables
-        with patch('langchain_timbr.llm_wrapper.llm_wrapper.llm_type', 'openai-chat'),\
-             patch('langchain_timbr.llm_wrapper.llm_wrapper.llm_api_key', 'test-key'),\
-             patch('langchain_timbr.llm_wrapper.llm_wrapper.llm_model', 'gpt-4'),\
+        with patch('langchain_timbr.llm_wrapper.llm_wrapper.config.llm_type', 'openai-chat'),\
+             patch('langchain_timbr.llm_wrapper.llm_wrapper.config.llm_api_key', 'test-key'),\
+             patch('langchain_timbr.llm_wrapper.llm_wrapper.config.llm_model', 'gpt-4'),\
             patch.dict(os.environ, {
                 'TIMBR_URL': 'http://test-timbr.com',
                 'TIMBR_TOKEN': 'test-token',

@@ -95,9 +95,9 @@ class TestOptionalLLMIntegration:
     def test_langgraph_nodes_with_env_variables(self):
         """Test that all langgraph nodes can be initialized without LLM when config defaults are available"""
         # Mock the config values directly instead of environment variables
-        with patch('langchain_timbr.llm_wrapper.llm_wrapper.llm_type', 'openai-chat'),\
-             patch('langchain_timbr.llm_wrapper.llm_wrapper.llm_api_key', 'test-key'),\
-             patch('langchain_timbr.llm_wrapper.llm_wrapper.llm_model', 'gpt-4'),\
+        with patch('langchain_timbr.llm_wrapper.llm_wrapper.config.llm_type', 'openai-chat'),\
+             patch('langchain_timbr.llm_wrapper.llm_wrapper.config.llm_api_key', 'test-key'),\
+             patch('langchain_timbr.llm_wrapper.llm_wrapper.config.llm_model', 'gpt-4'),\
             patch.dict(os.environ, {
                 'TIMBR_URL': 'http://test-timbr.com',
                 'TIMBR_TOKEN': 'test-token',

@@ -126,8 +126,8 @@ class TestOptionalLLMIntegration:

     def test_missing_llm_env_variables_raises_error(self):
         """Test that missing LLM env variables raise appropriate errors"""
-        with patch('langchain_timbr.llm_wrapper.llm_wrapper.llm_type', None),\
-             patch('langchain_timbr.llm_wrapper.llm_wrapper.llm_api_key', None),\
+        with patch('langchain_timbr.llm_wrapper.llm_wrapper.config.llm_type', None),\
+             patch('langchain_timbr.llm_wrapper.llm_wrapper.config.llm_api_key', None),\
             patch.dict(os.environ, {
                 'TIMBR_URL': 'http://test-timbr.com',
                 'TIMBR_TOKEN': 'test-token',

@@ -144,8 +144,8 @@ class TestOptionalLLMIntegration:
         from langchain_timbr.llm_wrapper.llm_wrapper import LlmWrapper

         # Mock the config values
-        with patch('langchain_timbr.llm_wrapper.llm_wrapper.llm_type', 'openai-chat'),\
-             patch('langchain_timbr.llm_wrapper.llm_wrapper.llm_api_key', 'env-key'),\
+        with patch('langchain_timbr.llm_wrapper.llm_wrapper.config.llm_type', 'openai-chat'),\
+             patch('langchain_timbr.llm_wrapper.llm_wrapper.config.llm_api_key', 'env-key'),\
             patch.dict(os.environ, {
                 'TIMBR_URL': 'http://test-timbr.com',
                 'TIMBR_TOKEN': 'test-token',