langchain-timbr 2.1.4__tar.gz → 2.1.5__tar.gz
This diff compares the contents of two publicly released versions of this package as they appear in their respective public registries. It is provided for informational purposes only.
- {langchain_timbr-2.1.4 → langchain_timbr-2.1.5}/PKG-INFO +8 -31
- {langchain_timbr-2.1.4 → langchain_timbr-2.1.5}/README.md +1 -1
- {langchain_timbr-2.1.4 → langchain_timbr-2.1.5}/pyproject.toml +9 -33
- {langchain_timbr-2.1.4 → langchain_timbr-2.1.5}/requirements.txt +2 -1
- {langchain_timbr-2.1.4 → langchain_timbr-2.1.5}/requirements310.txt +2 -1
- {langchain_timbr-2.1.4 → langchain_timbr-2.1.5}/requirements311.txt +2 -1
- {langchain_timbr-2.1.4 → langchain_timbr-2.1.5}/src/langchain_timbr/_version.py +2 -2
- {langchain_timbr-2.1.4 → langchain_timbr-2.1.5}/src/langchain_timbr/langgraph/generate_response_node.py +3 -0
- {langchain_timbr-2.1.4 → langchain_timbr-2.1.5}/src/langchain_timbr/llm_wrapper/llm_wrapper.py +51 -0
- {langchain_timbr-2.1.4 → langchain_timbr-2.1.5}/tests/standard/test_optional_llm_integration.py +1 -6
- {langchain_timbr-2.1.4 → langchain_timbr-2.1.5}/.github/dependabot.yml +0 -0
- {langchain_timbr-2.1.4 → langchain_timbr-2.1.5}/.github/pull_request_template.md +0 -0
- {langchain_timbr-2.1.4 → langchain_timbr-2.1.5}/.github/workflows/_codespell.yml +0 -0
- {langchain_timbr-2.1.4 → langchain_timbr-2.1.5}/.github/workflows/_fossa.yml +0 -0
- {langchain_timbr-2.1.4 → langchain_timbr-2.1.5}/.github/workflows/install-dependencies-and-run-tests.yml +0 -0
- {langchain_timbr-2.1.4 → langchain_timbr-2.1.5}/.github/workflows/publish.yml +0 -0
- {langchain_timbr-2.1.4 → langchain_timbr-2.1.5}/.gitignore +0 -0
- {langchain_timbr-2.1.4 → langchain_timbr-2.1.5}/LICENSE +0 -0
- {langchain_timbr-2.1.4 → langchain_timbr-2.1.5}/SECURITY.md +0 -0
- {langchain_timbr-2.1.4 → langchain_timbr-2.1.5}/pytest.ini +0 -0
- {langchain_timbr-2.1.4 → langchain_timbr-2.1.5}/src/langchain_timbr/__init__.py +0 -0
- {langchain_timbr-2.1.4 → langchain_timbr-2.1.5}/src/langchain_timbr/config.py +0 -0
- {langchain_timbr-2.1.4 → langchain_timbr-2.1.5}/src/langchain_timbr/langchain/__init__.py +0 -0
- {langchain_timbr-2.1.4 → langchain_timbr-2.1.5}/src/langchain_timbr/langchain/execute_timbr_query_chain.py +0 -0
- {langchain_timbr-2.1.4 → langchain_timbr-2.1.5}/src/langchain_timbr/langchain/generate_answer_chain.py +1 -1
- {langchain_timbr-2.1.4 → langchain_timbr-2.1.5}/src/langchain_timbr/langchain/generate_timbr_sql_chain.py +0 -0
- {langchain_timbr-2.1.4 → langchain_timbr-2.1.5}/src/langchain_timbr/langchain/identify_concept_chain.py +0 -0
- {langchain_timbr-2.1.4 → langchain_timbr-2.1.5}/src/langchain_timbr/langchain/timbr_sql_agent.py +0 -0
- {langchain_timbr-2.1.4 → langchain_timbr-2.1.5}/src/langchain_timbr/langchain/validate_timbr_sql_chain.py +0 -0
- {langchain_timbr-2.1.4 → langchain_timbr-2.1.5}/src/langchain_timbr/langgraph/__init__.py +0 -0
- {langchain_timbr-2.1.4 → langchain_timbr-2.1.5}/src/langchain_timbr/langgraph/execute_timbr_query_node.py +0 -0
- {langchain_timbr-2.1.4 → langchain_timbr-2.1.5}/src/langchain_timbr/langgraph/generate_timbr_sql_node.py +0 -0
- {langchain_timbr-2.1.4 → langchain_timbr-2.1.5}/src/langchain_timbr/langgraph/identify_concept_node.py +0 -0
- {langchain_timbr-2.1.4 → langchain_timbr-2.1.5}/src/langchain_timbr/langgraph/validate_timbr_query_node.py +0 -0
- {langchain_timbr-2.1.4 → langchain_timbr-2.1.5}/src/langchain_timbr/llm_wrapper/timbr_llm_wrapper.py +0 -0
- {langchain_timbr-2.1.4 → langchain_timbr-2.1.5}/src/langchain_timbr/timbr_llm_connector.py +0 -0
- {langchain_timbr-2.1.4 → langchain_timbr-2.1.5}/src/langchain_timbr/utils/general.py +0 -0
- {langchain_timbr-2.1.4 → langchain_timbr-2.1.5}/src/langchain_timbr/utils/prompt_service.py +0 -0
- {langchain_timbr-2.1.4 → langchain_timbr-2.1.5}/src/langchain_timbr/utils/temperature_supported_models.json +0 -0
- {langchain_timbr-2.1.4 → langchain_timbr-2.1.5}/src/langchain_timbr/utils/timbr_llm_utils.py +0 -0
- {langchain_timbr-2.1.4 → langchain_timbr-2.1.5}/src/langchain_timbr/utils/timbr_utils.py +0 -0
- {langchain_timbr-2.1.4 → langchain_timbr-2.1.5}/tests/README.md +0 -0
- {langchain_timbr-2.1.4 → langchain_timbr-2.1.5}/tests/conftest.py +0 -0
- {langchain_timbr-2.1.4 → langchain_timbr-2.1.5}/tests/integration/test_agent_integration.py +0 -0
- {langchain_timbr-2.1.4 → langchain_timbr-2.1.5}/tests/integration/test_azure_databricks_provider.py +0 -0
- {langchain_timbr-2.1.4 → langchain_timbr-2.1.5}/tests/integration/test_azure_openai_model.py +0 -0
- {langchain_timbr-2.1.4 → langchain_timbr-2.1.5}/tests/integration/test_chain_pipeline.py +0 -0
- {langchain_timbr-2.1.4 → langchain_timbr-2.1.5}/tests/integration/test_jwt_token.py +0 -0
- {langchain_timbr-2.1.4 → langchain_timbr-2.1.5}/tests/integration/test_langchain_chains.py +0 -0
- {langchain_timbr-2.1.4 → langchain_timbr-2.1.5}/tests/integration/test_langgraph_nodes.py +0 -0
- {langchain_timbr-2.1.4 → langchain_timbr-2.1.5}/tests/integration/test_timeout_functionality.py +0 -0
- {langchain_timbr-2.1.4 → langchain_timbr-2.1.5}/tests/standard/conftest.py +0 -0
- {langchain_timbr-2.1.4 → langchain_timbr-2.1.5}/tests/standard/test_chain_documentation.py +0 -0
- {langchain_timbr-2.1.4 → langchain_timbr-2.1.5}/tests/standard/test_connection_validation.py +0 -0
- {langchain_timbr-2.1.4 → langchain_timbr-2.1.5}/tests/standard/test_llm_wrapper_optional_params.py +0 -0
- {langchain_timbr-2.1.4 → langchain_timbr-2.1.5}/tests/standard/test_standard_chain_requirements.py +0 -0
- {langchain_timbr-2.1.4 → langchain_timbr-2.1.5}/tests/standard/test_unit_tests.py +0 -0
{langchain_timbr-2.1.4 → langchain_timbr-2.1.5}/PKG-INFO RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: langchain-timbr
-Version: 2.1.4
+Version: 2.1.5
 Summary: LangChain & LangGraph extensions that parse LLM prompts into Timbr semantic SQL and execute them.
 Project-URL: Homepage, https://github.com/WPSemantix/langchain-timbr
 Project-URL: Documentation, https://docs.timbr.ai/doc/docs/integration/langchain-sdk/
@@ -19,44 +19,18 @@ Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
 Requires-Python: <3.13,>=3.10
-Requires-Dist: anthropic==0.42.0
-Requires-Dist: azure-identity==1.25.0; python_version >= '3.11'
-Requires-Dist: azure-identity>=1.16.1; python_version == '3.10'
 Requires-Dist: cryptography==45.0.7; python_version >= '3.11'
 Requires-Dist: cryptography>=44.0.3; python_version == '3.10'
-Requires-Dist: databricks-langchain==0.7.1
-Requires-Dist: databricks-sdk==0.64.0
-Requires-Dist: google-generativeai==0.8.4
-Requires-Dist: langchain-anthropic==0.3.5; python_version >= '3.11'
-Requires-Dist: langchain-anthropic>=0.3.1; python_version == '3.10'
 Requires-Dist: langchain-community==0.3.30; python_version >= '3.11'
-Requires-Dist: langchain-community>=0.3.20; python_version == '3.10'
-Requires-Dist: langchain-core==0.3.78; python_version >= '3.11'
+Requires-Dist: langchain-community>=0.3.27; python_version == '3.10'
 Requires-Dist: langchain-core>=0.3.58; python_version == '3.10'
-Requires-Dist: langchain-google-genai==2.0.10; python_version >= '3.11'
-Requires-Dist: langchain-google-genai>=2.0.9; python_version == '3.10'
-Requires-Dist: langchain-google-vertexai==2.1.2; python_version >= '3.11'
-Requires-Dist: langchain-google-vertexai>=2.0.28; python_version == '3.10'
-Requires-Dist: langchain-openai==0.3.34; python_version >= '3.11'
-Requires-Dist: langchain-openai>=0.3.16; python_version == '3.10'
-Requires-Dist: langchain-tests==0.3.22; python_version >= '3.11'
-Requires-Dist: langchain-tests>=0.3.20; python_version == '3.10'
+Requires-Dist: langchain-core>=0.3.80; python_version >= '3.11'
 Requires-Dist: langchain==0.3.27; python_version >= '3.11'
 Requires-Dist: langchain>=0.3.25; python_version == '3.10'
 Requires-Dist: langgraph==0.6.8; python_version >= '3.11'
 Requires-Dist: langgraph>=0.3.20; python_version == '3.10'
-Requires-Dist: openai==2.1.0; python_version >= '3.11'
-Requires-Dist: openai>=1.77.0; python_version == '3.10'
-Requires-Dist: opentelemetry-api==1.38.0; python_version == '3.10'
-Requires-Dist: opentelemetry-sdk==1.38.0; python_version == '3.10'
 Requires-Dist: pydantic==2.10.4
-Requires-Dist: pytest==8.3.4
-Requires-Dist: pytimbr-api==2.0.0; python_version >= '3.11'
-Requires-Dist: pytimbr-api>=2.0.0; python_version == '3.10'
-Requires-Dist: snowflake-snowpark-python==1.39.1; python_version >= '3.11'
-Requires-Dist: snowflake-snowpark-python>=1.39.1; python_version == '3.10'
-Requires-Dist: snowflake==1.8.0; python_version >= '3.11'
-Requires-Dist: snowflake>=1.8.0; python_version == '3.10'
+Requires-Dist: pytimbr-api>=2.0.0; python_version >= '3.11'
 Requires-Dist: tiktoken==0.8.0
 Requires-Dist: transformers==4.57.0; python_version >= '3.11'
 Requires-Dist: transformers>=4.53; python_version == '3.10'
@@ -70,6 +44,7 @@ Requires-Dist: databricks-sdk==0.64.0; extra == 'all'
 Requires-Dist: google-generativeai==0.8.4; extra == 'all'
 Requires-Dist: langchain-anthropic==0.3.5; (python_version >= '3.11') and extra == 'all'
 Requires-Dist: langchain-anthropic>=0.3.1; (python_version == '3.10') and extra == 'all'
+Requires-Dist: langchain-aws<1,>=0.2.35; extra == 'all'
 Requires-Dist: langchain-google-genai==2.0.10; (python_version >= '3.11') and extra == 'all'
 Requires-Dist: langchain-google-genai>=2.0.9; (python_version == '3.10') and extra == 'all'
 Requires-Dist: langchain-google-vertexai==2.1.2; (python_version >= '3.11') and extra == 'all'
@@ -97,6 +72,8 @@ Requires-Dist: langchain-openai==0.3.34; (python_version >= '3.11') and extra == 'azure-openai'
 Requires-Dist: langchain-openai>=0.3.16; (python_version == '3.10') and extra == 'azure-openai'
 Requires-Dist: openai==2.1.0; (python_version >= '3.11') and extra == 'azure-openai'
 Requires-Dist: openai>=1.77.0; (python_version == '3.10') and extra == 'azure-openai'
+Provides-Extra: bedrock
+Requires-Dist: langchain-aws==0.2.35; extra == 'bedrock'
 Provides-Extra: databricks
 Requires-Dist: databricks-langchain==0.7.1; extra == 'databricks'
 Requires-Dist: databricks-sdk==0.64.0; extra == 'databricks'
@@ -158,7 +135,7 @@ python -m pip install langchain-timbr
 
 ### Install with selected LLM providers
 
-#### One of: openai, anthropic, google, azure_openai, snowflake, databricks, vertex_ai (or 'all')
+#### One of: openai, anthropic, google, azure_openai, snowflake, databricks, vertex_ai, bedrock (or 'all')
 
 ```bash
 python -m pip install 'langchain-timbr[<your selected providers, separated by comma w/o space>]'
{langchain_timbr-2.1.4 → langchain_timbr-2.1.5}/README.md RENAMED
@@ -29,7 +29,7 @@ python -m pip install langchain-timbr
 
 ### Install with selected LLM providers
 
-#### One of: openai, anthropic, google, azure_openai, snowflake, databricks, vertex_ai (or 'all')
+#### One of: openai, anthropic, google, azure_openai, snowflake, databricks, vertex_ai, bedrock (or 'all')
 
 ```bash
 python -m pip install 'langchain-timbr[<your selected providers, separated by comma w/o space>]'
{langchain_timbr-2.1.4 → langchain_timbr-2.1.5}/pyproject.toml RENAMED
@@ -24,53 +24,25 @@ classifiers = [
 
 dependencies = [
     # Core dependencies that are the same across all Python versions
-    "anthropic==0.42.0",
-    "databricks-langchain==0.7.1",
-    "databricks-sdk==0.64.0",
-    "google-generativeai==0.8.4",
     "pydantic==2.10.4",
-    "pytest==8.3.4",
     "tiktoken==0.8.0",
     "uvicorn==0.34.0",
+    "pytimbr-api>=2.0.0; python_version>='3.11'",
 
     # Python 3.11+ versions (same for 3.11 and 3.12+)
-    "azure-identity==1.25.0; python_version>='3.11'",
     "cryptography==45.0.7; python_version>='3.11'",
     "langchain==0.3.27; python_version>='3.11'",
-    "langchain-anthropic==0.3.5; python_version>='3.11'",
     "langchain-community==0.3.30; python_version>='3.11'",
-    "langchain-core==0.3.78; python_version>='3.11'",
-    "langchain-google-genai==2.0.10; python_version>='3.11'",
-    "langchain-google-vertexai==2.1.2; python_version>='3.11'",
-    "langchain-openai==0.3.34; python_version>='3.11'",
-    "langchain-tests==0.3.22; python_version>='3.11'",
+    "langchain-core>=0.3.80; python_version>='3.11'",
     "langgraph==0.6.8; python_version>='3.11'",
-    "openai==2.1.0; python_version>='3.11'",
-    "opentelemetry-api==1.38.0; python_version=='3.10'",
-    "opentelemetry-sdk==1.38.0; python_version=='3.10'",
-    "pytimbr_api==2.0.0; python_version>='3.11'",
-    "snowflake==1.8.0; python_version>='3.11'",
-    "snowflake-snowpark-python==1.39.1; python_version>='3.11'",
     "transformers==4.57.0; python_version>='3.11'",
 
     # Python 3.10 specific versions
-    "azure-identity>=1.16.1; python_version=='3.10'",
     "cryptography>=44.0.3; python_version=='3.10'",
     "langchain>=0.3.25; python_version=='3.10'",
-    "langchain-anthropic>=0.3.1; python_version=='3.10'",
-    "langchain_community>=0.3.20; python_version=='3.10'",
+    "langchain-community>=0.3.27; python_version=='3.10'",
     "langchain-core>=0.3.58; python_version=='3.10'",
-    "langchain-google-genai>=2.0.9; python_version=='3.10'",
-    "langchain-google-vertexai>=2.0.28; python_version=='3.10'",
-    "langchain-openai>=0.3.16; python_version=='3.10'",
-    "langchain-tests>=0.3.20; python_version=='3.10'",
     "langgraph>=0.3.20; python_version=='3.10'",
-    "openai>=1.77.0; python_version=='3.10'",
-    "opentelemetry-api==1.38.0; python_version=='3.10'",
-    "opentelemetry-sdk==1.38.0; python_version=='3.10'",
-    "pytimbr-api>=2.0.0; python_version=='3.10'",
-    "snowflake>=1.8.0; python_version=='3.10'",
-    "snowflake-snowpark-python>=1.39.1; python_version=='3.10'",
     "transformers>=4.53; python_version=='3.10'",
 ]
 
@@ -117,6 +89,9 @@ databricks = [
     "databricks-langchain==0.7.1",
     "databricks-sdk==0.64.0"
 ]
+bedrock = [
+    "langchain-aws==0.2.35",
+]
 
 # Development and testing
 dev = [
@@ -128,12 +103,13 @@ dev = [
 
 # All optional dependencies
 all = [
-    "anthropic==0.42.0",
-    "google-generativeai==0.8.4",
     "pytest==8.3.4",
     "uvicorn==0.34.0",
+    "anthropic==0.42.0",
+    "google-generativeai==0.8.4",
     "databricks-langchain==0.7.1",
     "databricks-sdk==0.64.0",
+    "langchain-aws>=0.2.35,<1",
     # Python 3.11+ versions
     "azure-identity==1.25.0; python_version>='3.11'",
     "langchain-anthropic==0.3.5; python_version>='3.11'",
{langchain_timbr-2.1.4 → langchain_timbr-2.1.5}/requirements.txt RENAMED
@@ -6,8 +6,9 @@ databricks-sdk==0.64.0
 google-generativeai==0.8.4
 langchain==0.3.27
 langchain-anthropic==0.3.5
+langchain-aws==0.2.35
 langchain-community==0.3.30
-langchain-core==0.3.78
+langchain-core==0.3.80
 langchain-google-genai==2.0.10
 langchain-google-vertexai==2.1.2
 langchain-openai==0.3.34
{langchain_timbr-2.1.4 → langchain_timbr-2.1.5}/requirements310.txt RENAMED
@@ -6,7 +6,8 @@ databricks-sdk==0.64.0
 google-generativeai==0.8.4
 langchain>=0.3.25
 langchain-anthropic>=0.3.1
-langchain-community>=0.3.20
+langchain-aws==0.2.35
+langchain-community>=0.3.27
 langchain-core>=0.3.58
 langchain-google-genai>=2.0.9
 langchain-google-vertexai>=2.0.28
{langchain_timbr-2.1.4 → langchain_timbr-2.1.5}/requirements311.txt RENAMED
@@ -6,8 +6,9 @@ databricks-sdk==0.64.0
 google-generativeai==0.8.4
 langchain==0.3.27
 langchain-anthropic==0.3.5
+langchain-aws==0.2.35
 langchain-community==0.3.30
-langchain-core==0.3.78
+langchain-core==0.3.80
 langchain-google-genai==2.0.10
 langchain-google-vertexai==2.1.2
 langchain-openai==0.3.34
{langchain_timbr-2.1.4 → langchain_timbr-2.1.5}/src/langchain_timbr/_version.py RENAMED
@@ -28,7 +28,7 @@ version_tuple: VERSION_TUPLE
 commit_id: COMMIT_ID
 __commit_id__: COMMIT_ID
 
-__version__ = version = '2.1.4'
-__version_tuple__ = version_tuple = (2, 1, 4)
+__version__ = version = '2.1.5'
+__version_tuple__ = version_tuple = (2, 1, 5)
 
 __commit_id__ = commit_id = None
{langchain_timbr-2.1.4 → langchain_timbr-2.1.5}/src/langchain_timbr/langgraph/generate_response_node.py RENAMED
@@ -20,6 +20,7 @@ class GenerateResponseNode:
         is_jwt: Optional[bool] = False,
         jwt_tenant_id: Optional[str] = None,
         conn_params: Optional[dict] = None,
+        note: Optional[str] = '',
         debug: Optional[bool] = False,
         **kwargs,
     ):
@@ -31,6 +32,7 @@
         :param is_jwt: Whether to use JWT authentication (default is False).
         :param jwt_tenant_id: JWT tenant ID for multi-tenant environments (required when is_jwt=True).
         :param conn_params: Extra Timbr connection parameters sent with every request (e.g., 'x-api-impersonate-user').
+        :param note: Optional additional note to extend our llm prompt
         """
         self.chain = GenerateAnswerChain(
             llm=llm,
@@ -40,6 +42,7 @@
             is_jwt=is_jwt,
             jwt_tenant_id=jwt_tenant_id,
             conn_params=conn_params,
+            note=note,
             debug=debug,
             **kwargs,
         )
{langchain_timbr-2.1.4 → langchain_timbr-2.1.5}/src/langchain_timbr/llm_wrapper/llm_wrapper.py RENAMED
@@ -15,6 +15,7 @@ class LlmTypes(Enum):
     Snowflake = 'snowflake-cortex'
     Databricks = 'chat-databricks'
     VertexAI = 'chat-vertexai'
+    Bedrock = 'amazon_bedrock_converse_chat'
     Timbr = 'timbr'
 
 
@@ -252,6 +253,28 @@
                 credentials=creds,
                 **params,
             )
+        elif is_llm_type(llm_type, LlmTypes.Bedrock):
+            from langchain_aws import ChatBedrockConverse
+            llm_model = model or "openai.gpt-oss-20b-1:0"
+            params = self._add_temperature(LlmTypes.Bedrock.name, llm_model, **llm_params)
+
+            aws_region = pop_param_value(params, ['aws_region', 'llm_region', 'region'])
+            if aws_region:
+                params['region_name'] = aws_region
+            aws_access_key_id = pop_param_value(params, ['aws_access_key_id', 'llm_access_key_id', 'access_key_id'])
+            if aws_access_key_id:
+                params['aws_access_key_id'] = aws_access_key_id
+            aws_secret_access_key = pop_param_value(params, ['aws_secret_access_key', 'llm_secret_access_key', 'secret_access_key'], default=api_key)
+            if aws_secret_access_key:
+                params['aws_secret_access_key'] = aws_secret_access_key
+            aws_session_token = pop_param_value(params, ['aws_session_token', 'llm_session_token', 'session_token'])
+            if aws_session_token:
+                params['aws_session_token'] = aws_session_token
+
+            return ChatBedrockConverse(
+                model=llm_model,
+                **params,
+            )
         else:
             raise ValueError(f"Unsupported LLM type: {llm_type}")
 
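
To make the alias handling above concrete, here is a small self-contained sketch of the pattern the new branch relies on: several "friendly" key names supplied via `llm_params` are normalized to the keyword arguments `ChatBedrockConverse` expects. The helper below only mirrors the apparent behavior of the package's `pop_param_value` utility, whose real implementation is not shown in this diff and may differ.

```python
# Illustrative stand-in for pop_param_value (assumption: pop the first matching
# alias from the dict, falling back to a default).
def pop_first_alias(params: dict, keys: list, default=None):
    """Pop and return the value of the first key in `keys` present in `params`."""
    for key in keys:
        if key in params:
            return params.pop(key)
    return default

# User-supplied llm_params using alias keys...
params = {'llm_region': 'us-east-1', 'access_key_id': 'AKIA...', 'temperature': 0.2}

# ...are normalized to the kwargs the Bedrock chat client expects.
region = pop_first_alias(params, ['aws_region', 'llm_region', 'region'])
if region:
    params['region_name'] = region
access_key = pop_first_alias(params, ['aws_access_key_id', 'llm_access_key_id', 'access_key_id'])
if access_key:
    params['aws_access_key_id'] = access_key

print(params)  # {'temperature': 0.2, 'region_name': 'us-east-1', 'aws_access_key_id': 'AKIA...'}
```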
@@ -324,6 +347,31 @@
                 if self.client.credentials:
                     client = genai.Client(credentials=self.client.credentials, vertexai=True, project=self.client.project, location=self.client.location)
                     models = [m.name.split('/')[-1] for m in client.models.list()]
+            elif is_llm_type(self._llm_type, LlmTypes.Bedrock):
+                import boto3
+
+                # Extract SecretStr values properly
+                aws_access_key_id = getattr(self.client, 'aws_access_key_id', None)
+                if aws_access_key_id and hasattr(aws_access_key_id, '_secret_value'):
+                    aws_access_key_id = aws_access_key_id._secret_value
+
+                aws_secret_access_key = getattr(self.client, 'aws_secret_access_key', None)
+                if aws_secret_access_key and hasattr(aws_secret_access_key, '_secret_value'):
+                    aws_secret_access_key = aws_secret_access_key._secret_value
+
+                aws_session_token = getattr(self.client, 'aws_session_token', None)
+                if aws_session_token and hasattr(aws_session_token, '_secret_value'):
+                    aws_session_token = aws_session_token._secret_value
+
+                bedrock_client = boto3.client(
+                    service_name='bedrock',
+                    region_name=getattr(self.client, 'region_name', None),
+                    aws_access_key_id=aws_access_key_id,
+                    aws_secret_access_key=aws_secret_access_key,
+                    aws_session_token=aws_session_token,
+                )
+                response = bedrock_client.list_foundation_models()
+                models = [model['modelId'] for model in response.get('modelSummaries', [])]
 
         except Exception:
             # If model list fetching throws an exception, return default value using get_supported_models
@@ -341,6 +389,9 @@
 
 
     def _call(self, prompt, **kwargs):
+        # TODO: Remove this condition on next langchain-timbr major release
+        if is_llm_type(self._llm_type, LlmTypes.Bedrock):
+            return self.client.invoke(prompt, **kwargs)
         return self.client(prompt, **kwargs)
 
 
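
A hedged usage sketch of the new Bedrock support (not taken from the package docs): the wrapper is selected via the `LlmTypes.Bedrock` value added above, and AWS settings are expected to reach the new branch through `llm_params` under one of the accepted aliases. Constructor keyword names other than `llm_type` are assumptions and may differ.

```python
# Illustration only: kwargs beyond `llm_type` are assumed, not confirmed by this diff;
# credentials and the model ID are placeholders.
from langchain_timbr import LlmWrapper  # top-level export assumed

bedrock_llm = LlmWrapper(
    llm_type='amazon_bedrock_converse_chat',            # LlmTypes.Bedrock value added in 2.1.5
    model='anthropic.claude-3-5-sonnet-20240620-v1:0',  # example Bedrock model ID (assumed kwarg)
    aws_region='us-east-1',                             # normalized to region_name by the new branch
    aws_access_key_id='<access-key-id>',
    aws_secret_access_key='<secret-access-key>',
)

# The wrapper implements LangChain's LLM `_call` hook, so the standard Runnable
# interface should apply; for Bedrock, `_call` now delegates to the chat client's
# invoke() as shown in the hunk above.
print(bedrock_llm.invoke("List three concepts from my ontology."))
```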
{langchain_timbr-2.1.4 → langchain_timbr-2.1.5}/tests/standard/test_optional_llm_integration.py RENAMED
@@ -145,12 +145,7 @@
 
         # Mock the config values
         with patch('langchain_timbr.llm_wrapper.llm_wrapper.config.llm_type', 'openai-chat'),\
-             patch('langchain_timbr.llm_wrapper.llm_wrapper.config.llm_api_key', 'env-key'),\
-             patch.dict(os.environ, {
-                 'TIMBR_URL': 'http://test-timbr.com',
-                 'TIMBR_TOKEN': 'test-token',
-                 'TIMBR_ONTOLOGY': 'test-ontology'
-             }):
+             patch('langchain_timbr.llm_wrapper.llm_wrapper.config.llm_api_key', 'env-key'):
            # Create explicit LLM
            explicit_llm = LlmWrapper(
                llm_type='openai-chat',
{langchain_timbr-2.1.4 → langchain_timbr-2.1.5}/src/langchain_timbr/langchain/generate_answer_chain.py RENAMED
@@ -34,8 +34,8 @@ class GenerateAnswerChain(Chain):
     :param verify_ssl: Whether to verify SSL certificates (default is True).
     :param is_jwt: Whether to use JWT authentication (default is False).
     :param jwt_tenant_id: JWT tenant ID for multi-tenant environments (required when is_jwt=True).
-    :param note: Optional additional note to extend our llm prompt
     :param conn_params: Extra Timbr connection parameters sent with every request (e.g., 'x-api-impersonate-user').
+    :param note: Optional additional note to extend our llm prompt
 
     ## Example
     ```