langchain-timbr 2.0.3__tar.gz → 2.1.0__tar.gz

This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their respective public registries.
Files changed (59)
  1. langchain_timbr-2.1.0/.github/workflows/install-dependencies-and-run-tests.yml +94 -0
  2. {langchain_timbr-2.0.3 → langchain_timbr-2.1.0}/.gitignore +4 -1
  3. langchain_timbr-2.1.0/PKG-INFO +213 -0
  4. {langchain_timbr-2.0.3 → langchain_timbr-2.1.0}/README.md +2 -2
  5. langchain_timbr-2.1.0/pyproject.toml +173 -0
  6. langchain_timbr-2.1.0/requirements.txt +24 -0
  7. langchain_timbr-2.0.3/requirements.txt → langchain_timbr-2.1.0/requirements310.txt +6 -5
  8. langchain_timbr-2.1.0/requirements311.txt +26 -0
  9. {langchain_timbr-2.0.3 → langchain_timbr-2.1.0}/src/langchain_timbr/_version.py +2 -2
  10. {langchain_timbr-2.0.3 → langchain_timbr-2.1.0}/src/langchain_timbr/utils/timbr_llm_utils.py +21 -16
  11. {langchain_timbr-2.0.3 → langchain_timbr-2.1.0}/tests/standard/test_chain_documentation.py +1 -1
  12. {langchain_timbr-2.0.3 → langchain_timbr-2.1.0}/tests/standard/test_unit_tests.py +107 -1
  13. langchain_timbr-2.0.3/.github/workflows/install-dependencies-and-run-tests.yml +0 -59
  14. langchain_timbr-2.0.3/PKG-INFO +0 -163
  15. langchain_timbr-2.0.3/pyproject.toml +0 -95
  16. {langchain_timbr-2.0.3 → langchain_timbr-2.1.0}/.github/dependabot.yml +0 -0
  17. {langchain_timbr-2.0.3 → langchain_timbr-2.1.0}/.github/pull_request_template.md +0 -0
  18. {langchain_timbr-2.0.3 → langchain_timbr-2.1.0}/.github/workflows/_codespell.yml +0 -0
  19. {langchain_timbr-2.0.3 → langchain_timbr-2.1.0}/.github/workflows/_fossa.yml +0 -0
  20. {langchain_timbr-2.0.3 → langchain_timbr-2.1.0}/.github/workflows/publish.yml +0 -0
  21. {langchain_timbr-2.0.3 → langchain_timbr-2.1.0}/LICENSE +0 -0
  22. {langchain_timbr-2.0.3 → langchain_timbr-2.1.0}/pytest.ini +0 -0
  23. {langchain_timbr-2.0.3 → langchain_timbr-2.1.0}/src/langchain_timbr/__init__.py +0 -0
  24. {langchain_timbr-2.0.3 → langchain_timbr-2.1.0}/src/langchain_timbr/config.py +0 -0
  25. {langchain_timbr-2.0.3 → langchain_timbr-2.1.0}/src/langchain_timbr/langchain/__init__.py +0 -0
  26. {langchain_timbr-2.0.3 → langchain_timbr-2.1.0}/src/langchain_timbr/langchain/execute_timbr_query_chain.py +0 -0
  27. {langchain_timbr-2.0.3 → langchain_timbr-2.1.0}/src/langchain_timbr/langchain/generate_answer_chain.py +0 -0
  28. {langchain_timbr-2.0.3 → langchain_timbr-2.1.0}/src/langchain_timbr/langchain/generate_timbr_sql_chain.py +0 -0
  29. {langchain_timbr-2.0.3 → langchain_timbr-2.1.0}/src/langchain_timbr/langchain/identify_concept_chain.py +0 -0
  30. {langchain_timbr-2.0.3 → langchain_timbr-2.1.0}/src/langchain_timbr/langchain/timbr_sql_agent.py +0 -0
  31. {langchain_timbr-2.0.3 → langchain_timbr-2.1.0}/src/langchain_timbr/langchain/validate_timbr_sql_chain.py +0 -0
  32. {langchain_timbr-2.0.3 → langchain_timbr-2.1.0}/src/langchain_timbr/langgraph/__init__.py +0 -0
  33. {langchain_timbr-2.0.3 → langchain_timbr-2.1.0}/src/langchain_timbr/langgraph/execute_timbr_query_node.py +0 -0
  34. {langchain_timbr-2.0.3 → langchain_timbr-2.1.0}/src/langchain_timbr/langgraph/generate_response_node.py +0 -0
  35. {langchain_timbr-2.0.3 → langchain_timbr-2.1.0}/src/langchain_timbr/langgraph/generate_timbr_sql_node.py +0 -0
  36. {langchain_timbr-2.0.3 → langchain_timbr-2.1.0}/src/langchain_timbr/langgraph/identify_concept_node.py +0 -0
  37. {langchain_timbr-2.0.3 → langchain_timbr-2.1.0}/src/langchain_timbr/langgraph/validate_timbr_query_node.py +0 -0
  38. {langchain_timbr-2.0.3 → langchain_timbr-2.1.0}/src/langchain_timbr/llm_wrapper/llm_wrapper.py +0 -0
  39. {langchain_timbr-2.0.3 → langchain_timbr-2.1.0}/src/langchain_timbr/llm_wrapper/timbr_llm_wrapper.py +0 -0
  40. {langchain_timbr-2.0.3 → langchain_timbr-2.1.0}/src/langchain_timbr/timbr_llm_connector.py +0 -0
  41. {langchain_timbr-2.0.3 → langchain_timbr-2.1.0}/src/langchain_timbr/utils/general.py +0 -0
  42. {langchain_timbr-2.0.3 → langchain_timbr-2.1.0}/src/langchain_timbr/utils/prompt_service.py +0 -0
  43. {langchain_timbr-2.0.3 → langchain_timbr-2.1.0}/src/langchain_timbr/utils/temperature_supported_models.json +0 -0
  44. {langchain_timbr-2.0.3 → langchain_timbr-2.1.0}/src/langchain_timbr/utils/timbr_utils.py +0 -0
  45. {langchain_timbr-2.0.3 → langchain_timbr-2.1.0}/tests/README.md +0 -0
  46. {langchain_timbr-2.0.3 → langchain_timbr-2.1.0}/tests/conftest.py +0 -0
  47. {langchain_timbr-2.0.3 → langchain_timbr-2.1.0}/tests/integration/test_agent_integration.py +0 -0
  48. {langchain_timbr-2.0.3 → langchain_timbr-2.1.0}/tests/integration/test_azure_databricks_provider.py +0 -0
  49. {langchain_timbr-2.0.3 → langchain_timbr-2.1.0}/tests/integration/test_azure_openai_model.py +0 -0
  50. {langchain_timbr-2.0.3 → langchain_timbr-2.1.0}/tests/integration/test_chain_pipeline.py +0 -0
  51. {langchain_timbr-2.0.3 → langchain_timbr-2.1.0}/tests/integration/test_jwt_token.py +0 -0
  52. {langchain_timbr-2.0.3 → langchain_timbr-2.1.0}/tests/integration/test_langchain_chains.py +0 -0
  53. {langchain_timbr-2.0.3 → langchain_timbr-2.1.0}/tests/integration/test_langgraph_nodes.py +0 -0
  54. {langchain_timbr-2.0.3 → langchain_timbr-2.1.0}/tests/integration/test_timeout_functionality.py +0 -0
  55. {langchain_timbr-2.0.3 → langchain_timbr-2.1.0}/tests/standard/conftest.py +0 -0
  56. {langchain_timbr-2.0.3 → langchain_timbr-2.1.0}/tests/standard/test_connection_validation.py +0 -0
  57. {langchain_timbr-2.0.3 → langchain_timbr-2.1.0}/tests/standard/test_llm_wrapper_optional_params.py +0 -0
  58. {langchain_timbr-2.0.3 → langchain_timbr-2.1.0}/tests/standard/test_optional_llm_integration.py +0 -0
  59. {langchain_timbr-2.0.3 → langchain_timbr-2.1.0}/tests/standard/test_standard_chain_requirements.py +0 -0
@@ -0,0 +1,94 @@
+ name: Install Dependencies and Run Tests
+
+ on:
+   push:
+     branches: [ main, master ]
+   pull_request:
+     branches: [ main, master ]
+
+ env:
+   TIMBR_URL: ${{ secrets.TIMBR_URL }}
+   TIMBR_TOKEN: ${{ secrets.TIMBR_TOKEN }}
+   LLM_TYPE: ${{ secrets.LLM_TYPE }}
+   LLM_MODEL: ${{ secrets.LLM_MODEL }}
+   LLM_API_KEY: ${{ secrets.LLM_API_KEY }}
+   JWT_TIMBR_URL: ${{ secrets.JWT_TIMBR_URL }}
+   JWT_TIMBR_ONTOLOGY: ${{ secrets.JWT_TIMBR_ONTOLOGY }}
+   JWT_TENANT_ID: ${{ secrets.JWT_TENANT_ID }}
+   JWT_CLIENT_ID: ${{ secrets.JWT_CLIENT_ID }}
+   JWT_USERNAME: ${{ secrets.JWT_USERNAME }}
+   JWT_PASSWORD: ${{ secrets.JWT_PASSWORD }}
+   JWT_SCOPE: ${{ secrets.JWT_SCOPE }}
+   JWT_SECRET: ${{ secrets.JWT_SECRET }}
+
+ jobs:
+   test-python-310:
+     runs-on: ubuntu-latest
+
+     steps:
+       - name: Checkout repository
+         uses: actions/checkout@v3
+
+       - name: Set up Python 3.10
+         uses: actions/setup-python@v4
+         with:
+           python-version: '3.10'
+
+       - name: Install dependencies
+         run: |
+           python -m pip install --upgrade pip
+           if [ -f requirements310.txt ]; then pip install -r requirements310.txt; fi
+           if [ -f requirements-dev.txt ]; then pip install -r requirements-dev.txt; fi
+           pip install pytest
+
+       - name: Run tests
+         run: |
+           python -m pytest -v --maxfail=1 --disable-warnings -q
+
+   test-python-311:
+     runs-on: ubuntu-latest
+     needs: test-python-310
+
+     steps:
+       - name: Checkout repository
+         uses: actions/checkout@v3
+
+       - name: Set up Python 3.11
+         uses: actions/setup-python@v4
+         with:
+           python-version: '3.11'
+
+       - name: Install dependencies
+         run: |
+           python -m pip install --upgrade pip
+           if [ -f requirements311.txt ]; then pip install -r requirements311.txt; fi
+           if [ -f requirements-dev.txt ]; then pip install -r requirements-dev.txt; fi
+           pip install pytest
+
+       - name: Run tests
+         run: |
+           python -m pytest -v --maxfail=1 --disable-warnings -q
+
+   test-python-312:
+     runs-on: ubuntu-latest
+     needs: test-python-311
+
+     steps:
+       - name: Checkout repository
+         uses: actions/checkout@v3
+
+       - name: Set up Python 3.12
+         uses: actions/setup-python@v4
+         with:
+           python-version: '3.12'
+
+       - name: Install dependencies
+         run: |
+           python -m pip install --upgrade pip
+           if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
+           if [ -f requirements-dev.txt ]; then pip install -r requirements-dev.txt; fi
+           pip install pytest
+
+       - name: Run tests
+         run: |
+           python -m pytest -v --maxfail=1 --disable-warnings -q
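Note: the three jobs run in sequence (`test-python-311` needs `test-python-310`, and `test-python-312` needs `test-python-311`), each installing from its version-specific requirements file. A rough local equivalent of one leg, sketched in Python and assuming it runs from the repository root:

```python
# Local approximation of one CI leg: pick the requirements file that matches
# the running interpreter, install it, then run the same pytest invocation.
# Sketch only; assumes execution from the repository root.
import subprocess
import sys

req = {
    (3, 10): "requirements310.txt",
    (3, 11): "requirements311.txt",
}.get(sys.version_info[:2], "requirements.txt")  # 3.12 falls through to requirements.txt

subprocess.check_call([sys.executable, "-m", "pip", "install", "-r", req])
subprocess.check_call([sys.executable, "-m", "pytest", "-v", "--maxfail=1", "--disable-warnings", "-q"])
```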
@@ -18,4 +18,7 @@ __pycache__/
  .env
  env
  venv
- .DS_Store
+ .DS_Store
+ env310/
+ env311/
+ env312/
@@ -0,0 +1,213 @@
+ Metadata-Version: 2.4
+ Name: langchain-timbr
+ Version: 2.1.0
+ Summary: LangChain & LangGraph extensions that parse LLM prompts into Timbr semantic SQL and execute them.
+ Project-URL: Homepage, https://github.com/WPSemantix/langchain-timbr
+ Project-URL: Documentation, https://docs.timbr.ai/doc/docs/integration/langchain-sdk/
+ Project-URL: Source, https://github.com/WPSemantix/langchain-timbr
+ Project-URL: Issues, https://github.com/WPSemantix/langchain-timbr/issues
+ Author-email: "Timbr.ai" <contact@timbr.ai>
+ License: MIT
+ License-File: LICENSE
+ Keywords: Agents,Knowledge Graph,LLM,LangChain,LangGraph,SQL,Semantic Layer,Timbr
+ Classifier: Intended Audience :: Developers
+ Classifier: License :: OSI Approved :: MIT License
+ Classifier: Programming Language :: Python :: 3
+ Classifier: Programming Language :: Python :: 3 :: Only
+ Classifier: Programming Language :: Python :: 3.10
+ Classifier: Programming Language :: Python :: 3.11
+ Classifier: Programming Language :: Python :: 3.12
+ Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
+ Requires-Python: <3.13,>=3.10
+ Requires-Dist: anthropic==0.42.0
+ Requires-Dist: azure-identity==1.25.0; python_version >= '3.11'
+ Requires-Dist: azure-identity>=1.16.1; python_version == '3.10'
+ Requires-Dist: cryptography==45.0.7; python_version >= '3.11'
+ Requires-Dist: cryptography>=44.0.3; python_version == '3.10'
+ Requires-Dist: databricks-langchain==0.7.1
+ Requires-Dist: databricks-sdk==0.64.0
+ Requires-Dist: google-generativeai==0.8.4
+ Requires-Dist: langchain-anthropic==0.3.5; python_version >= '3.11'
+ Requires-Dist: langchain-anthropic>=0.3.1; python_version == '3.10'
+ Requires-Dist: langchain-community==0.3.30; python_version >= '3.11'
+ Requires-Dist: langchain-community>=0.3.20; python_version == '3.10'
+ Requires-Dist: langchain-core==0.3.78; python_version >= '3.11'
+ Requires-Dist: langchain-core>=0.3.58; python_version == '3.10'
+ Requires-Dist: langchain-google-genai==2.0.10; python_version >= '3.11'
+ Requires-Dist: langchain-google-genai>=2.0.9; python_version == '3.10'
+ Requires-Dist: langchain-google-vertexai==2.1.2; python_version >= '3.11'
+ Requires-Dist: langchain-google-vertexai>=2.0.28; python_version == '3.10'
+ Requires-Dist: langchain-openai==0.3.34; python_version >= '3.11'
+ Requires-Dist: langchain-openai>=0.3.16; python_version == '3.10'
+ Requires-Dist: langchain-tests==0.3.22; python_version >= '3.11'
+ Requires-Dist: langchain-tests>=0.3.20; python_version == '3.10'
+ Requires-Dist: langchain==0.3.27; python_version >= '3.11'
+ Requires-Dist: langchain>=0.3.25; python_version == '3.10'
+ Requires-Dist: langgraph==0.6.8; python_version >= '3.11'
+ Requires-Dist: langgraph>=0.3.20; python_version == '3.10'
+ Requires-Dist: openai==2.1.0; python_version >= '3.11'
+ Requires-Dist: openai>=1.77.0; python_version == '3.10'
+ Requires-Dist: opentelemetry-api==1.38.0; python_version == '3.10'
+ Requires-Dist: opentelemetry-sdk==1.38.0; python_version == '3.10'
+ Requires-Dist: pydantic==2.10.4
+ Requires-Dist: pytest==8.3.4
+ Requires-Dist: pytimbr-api==2.0.0; python_version >= '3.11'
+ Requires-Dist: pytimbr-api>=2.0.0; python_version == '3.10'
+ Requires-Dist: snowflake-snowpark-python==1.39.1; python_version >= '3.11'
+ Requires-Dist: snowflake-snowpark-python>=1.39.1; python_version == '3.10'
+ Requires-Dist: snowflake==1.8.0; python_version >= '3.11'
+ Requires-Dist: snowflake>=1.8.0; python_version == '3.10'
+ Requires-Dist: tiktoken==0.8.0
+ Requires-Dist: transformers==4.57.0; python_version >= '3.11'
+ Requires-Dist: transformers>=4.53; python_version == '3.10'
+ Requires-Dist: uvicorn==0.34.0
+ Provides-Extra: all
+ Requires-Dist: anthropic==0.42.0; extra == 'all'
+ Requires-Dist: azure-identity==1.25.0; (python_version >= '3.11') and extra == 'all'
+ Requires-Dist: azure-identity>=1.16.1; (python_version == '3.10') and extra == 'all'
+ Requires-Dist: databricks-langchain==0.7.1; extra == 'all'
+ Requires-Dist: databricks-sdk==0.64.0; extra == 'all'
+ Requires-Dist: google-generativeai==0.8.4; extra == 'all'
+ Requires-Dist: langchain-anthropic==0.3.5; (python_version >= '3.11') and extra == 'all'
+ Requires-Dist: langchain-anthropic>=0.3.1; (python_version == '3.10') and extra == 'all'
+ Requires-Dist: langchain-google-genai==2.0.10; (python_version >= '3.11') and extra == 'all'
+ Requires-Dist: langchain-google-genai>=2.0.9; (python_version == '3.10') and extra == 'all'
+ Requires-Dist: langchain-google-vertexai==2.1.2; (python_version >= '3.11') and extra == 'all'
+ Requires-Dist: langchain-google-vertexai>=2.0.28; (python_version == '3.10') and extra == 'all'
+ Requires-Dist: langchain-openai==0.3.34; (python_version >= '3.11') and extra == 'all'
+ Requires-Dist: langchain-openai>=0.3.16; (python_version == '3.10') and extra == 'all'
+ Requires-Dist: langchain-tests==0.3.22; (python_version >= '3.11') and extra == 'all'
+ Requires-Dist: langchain-tests>=0.3.20; (python_version == '3.10') and extra == 'all'
+ Requires-Dist: openai==2.1.0; (python_version >= '3.11') and extra == 'all'
+ Requires-Dist: openai>=1.77.0; (python_version == '3.10') and extra == 'all'
+ Requires-Dist: pytest==8.3.4; extra == 'all'
+ Requires-Dist: snowflake-snowpark-python==1.39.1; (python_version >= '3.11') and extra == 'all'
+ Requires-Dist: snowflake-snowpark-python>=1.39.1; (python_version == '3.10') and extra == 'all'
+ Requires-Dist: snowflake==1.8.0; (python_version >= '3.11') and extra == 'all'
+ Requires-Dist: snowflake>=1.8.0; (python_version == '3.10') and extra == 'all'
+ Requires-Dist: uvicorn==0.34.0; extra == 'all'
+ Provides-Extra: anthropic
+ Requires-Dist: anthropic==0.42.0; extra == 'anthropic'
+ Requires-Dist: langchain-anthropic==0.3.5; (python_version >= '3.11') and extra == 'anthropic'
+ Requires-Dist: langchain-anthropic>=0.3.1; (python_version == '3.10') and extra == 'anthropic'
+ Provides-Extra: azure-openai
+ Requires-Dist: azure-identity==1.25.0; (python_version >= '3.11') and extra == 'azure-openai'
+ Requires-Dist: azure-identity>=1.16.1; (python_version == '3.10') and extra == 'azure-openai'
+ Requires-Dist: langchain-openai==0.3.34; (python_version >= '3.11') and extra == 'azure-openai'
+ Requires-Dist: langchain-openai>=0.3.16; (python_version == '3.10') and extra == 'azure-openai'
+ Requires-Dist: openai==2.1.0; (python_version >= '3.11') and extra == 'azure-openai'
+ Requires-Dist: openai>=1.77.0; (python_version == '3.10') and extra == 'azure-openai'
+ Provides-Extra: databricks
+ Requires-Dist: databricks-langchain==0.7.1; extra == 'databricks'
+ Requires-Dist: databricks-sdk==0.64.0; extra == 'databricks'
+ Provides-Extra: dev
+ Requires-Dist: langchain-tests==0.3.22; (python_version >= '3.11') and extra == 'dev'
+ Requires-Dist: langchain-tests>=0.3.20; (python_version == '3.10') and extra == 'dev'
+ Requires-Dist: pytest==8.3.4; extra == 'dev'
+ Requires-Dist: uvicorn==0.34.0; extra == 'dev'
+ Provides-Extra: google
+ Requires-Dist: google-generativeai==0.8.4; extra == 'google'
+ Requires-Dist: langchain-google-genai==2.0.10; (python_version >= '3.11') and extra == 'google'
+ Requires-Dist: langchain-google-genai>=2.0.9; (python_version == '3.10') and extra == 'google'
+ Provides-Extra: openai
+ Requires-Dist: langchain-openai==0.3.34; (python_version >= '3.11') and extra == 'openai'
+ Requires-Dist: langchain-openai>=0.3.16; (python_version == '3.10') and extra == 'openai'
+ Requires-Dist: openai==2.1.0; (python_version >= '3.11') and extra == 'openai'
+ Requires-Dist: openai>=1.77.0; (python_version == '3.10') and extra == 'openai'
+ Provides-Extra: snowflake
+ Requires-Dist: opentelemetry-api==1.38.0; (python_version < '3.12') and extra == 'snowflake'
+ Requires-Dist: opentelemetry-sdk==1.38.0; (python_version < '3.12') and extra == 'snowflake'
+ Requires-Dist: snowflake-snowpark-python==1.39.1; (python_version >= '3.11') and extra == 'snowflake'
+ Requires-Dist: snowflake-snowpark-python>=1.39.1; (python_version == '3.10') and extra == 'snowflake'
+ Requires-Dist: snowflake==1.8.0; (python_version >= '3.11') and extra == 'snowflake'
+ Requires-Dist: snowflake>=1.8.0; (python_version == '3.10') and extra == 'snowflake'
+ Provides-Extra: vertex-ai
+ Requires-Dist: google-generativeai==0.8.4; extra == 'vertex-ai'
+ Requires-Dist: langchain-google-vertexai==2.1.2; (python_version >= '3.11') and extra == 'vertex-ai'
+ Requires-Dist: langchain-google-vertexai>=2.0.28; (python_version == '3.10') and extra == 'vertex-ai'
+ Description-Content-Type: text/markdown
+
+ ![Timbr logo description](https://timbr.ai/wp-content/uploads/2025/01/logotimbrai230125.png)
+
+ [![FOSSA Status](https://app.fossa.com/api/projects/git%2Bgithub.com%2FWPSemantix%2Flangchain-timbr.svg?type=shield&issueType=security)](https://app.fossa.com/projects/git%2Bgithub.com%2FWPSemantix%2Flangchain-timbr?ref=badge_shield&issueType=security)
+ [![FOSSA Status](https://app.fossa.com/api/projects/git%2Bgithub.com%2FWPSemantix%2Flangchain-timbr.svg?type=shield&issueType=license)](https://app.fossa.com/projects/git%2Bgithub.com%2FWPSemantix%2Flangchain-timbr?ref=badge_shield&issueType=license)
+
+
+ [![Python 3.10](https://img.shields.io/badge/python-3.10-blue.svg)](https://www.python.org/downloads/release/python-31017/)
+ [![Python 3.11](https://img.shields.io/badge/python-3.11-blue.svg)](https://www.python.org/downloads/release/python-31112/)
+ [![Python 3.12](https://img.shields.io/badge/python-3.12-blue.svg)](https://www.python.org/downloads/release/python-3129/)
+
+ # Timbr LangChain LLM SDK
+
+ Timbr LangChain LLM SDK is a Python SDK that extends LangChain and LangGraph with custom agents, chains, and nodes for seamless integration with the Timbr semantic layer. It enables converting natural language prompts into optimized semantic-SQL queries and executing them directly against your data.
+
+ ![Timbr LangGraph pipeline](https://docs.timbr.ai/doc/assets/images/timbr-langgraph-fcf8e2eb7e26dc9dfa8b56b62937281e.png)
+
+ ## Dependencies
+
+ - Access to a timbr-server
+ - Python 3.10 or newer
+
+ ## Installation
+
+ ### Using pip
+
+ ```bash
+ python -m pip install langchain-timbr
+ ```
+
+ ### Install with selected LLM providers
+
+ #### One of: openai, anthropic, google, azure_openai, snowflake, databricks, vertex_ai (or 'all')
+
+ ```bash
+ python -m pip install 'langchain-timbr[<your selected providers, separated by comma w/o space>]'
+ ```
+
+ ### Using pip from github
+
+ ```bash
+ pip install git+https://github.com/WPSemantix/langchain-timbr
+ ```
+
+ ## Documentation
+
+ For comprehensive documentation and usage examples, please visit:
+
+ - [Timbr LangChain Documentation](https://docs.timbr.ai/doc/docs/integration/langchain-sdk)
+ - [Timbr LangGraph Documentation](https://docs.timbr.ai/doc/docs/integration/langgraph-sdk)
+
+ ## Configuration
+
+ The SDK uses environment variables for configuration. All configurations are optional - when set, they serve as default values for `langchain-timbr` provided tools. Below are all available configuration options:
+
+ ### Configuration Options
+
+ #### Timbr Connection Settings
+
+ - **`TIMBR_URL`** - The URL of your Timbr server
+ - **`TIMBR_TOKEN`** - Authentication token for accessing the Timbr server
+ - **`TIMBR_ONTOLOGY`** - The ontology to use (also accepts `ONTOLOGY` as an alias)
+ - **`IS_JWT`** - Whether the token is a JWT token (true/false)
+ - **`JWT_TENANT_ID`** - Tenant ID for JWT authentication
+
+ #### Cache and Data Processing
+
+ - **`CACHE_TIMEOUT`** - Timeout for caching operations in seconds
+ - **`IGNORE_TAGS`** - Comma-separated list of tags to ignore during processing
+ - **`IGNORE_TAGS_PREFIX`** - Comma-separated list of tag prefixes to ignore during processing
+
+ #### LLM Configuration
+
+ - **`LLM_TYPE`** - The type of LLM provider to use
+ - **`LLM_MODEL`** - The specific model to use with the LLM provider
+ - **`LLM_API_KEY`** - API key or client secret for the LLM provider
+ - **`LLM_TEMPERATURE`** - Temperature setting for LLM responses (controls randomness)
+ - **`LLM_ADDITIONAL_PARAMS`** - Additional parameters to pass to the LLM
+ - **`LLM_TIMEOUT`** - Timeout for LLM requests in seconds
+ - **`LLM_TENANT_ID`** - LLM provider tenant/directory ID (Used for Service Principal authentication)
+ - **`LLM_CLIENT_ID`** - LLM provider client ID (Used for Service Principal authentication)
+ - **`LLM_CLIENT_SECRET`** - LLM provider client secret (Used for Service Principal authentication)
+ - **`LLM_ENDPOINT`** - LLM provider OpenAI endpoint URL
+ - **`LLM_API_VERSION`** - LLM provider API version
+ - **`LLM_SCOPE`** - LLM provider authentication scope
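Note on the configuration section above: since these variables only supply defaults for the `langchain-timbr` tools, a minimal setup can be sketched as below. Every value is a placeholder, and the `LLM_TYPE` string is an assumed provider identifier rather than one taken from the docs:

```python
# Hedged sketch: set the defaults documented above before using langchain-timbr
# tools. All values here are placeholders, not real endpoints, tokens, or keys.
import os

os.environ["TIMBR_URL"] = "https://your-timbr-server"   # placeholder URL
os.environ["TIMBR_TOKEN"] = "<timbr-token>"             # placeholder token
os.environ["TIMBR_ONTOLOGY"] = "my_ontology"            # or use the ONTOLOGY alias
os.environ["LLM_TYPE"] = "openai-chat"                  # assumption: provider identifier
os.environ["LLM_MODEL"] = "gpt-4o"                      # placeholder model name
os.environ["LLM_API_KEY"] = "<api-key>"                 # placeholder secret
os.environ["LLM_TEMPERATURE"] = "0"                     # env var values are strings
```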
@@ -3,7 +3,7 @@
  [![FOSSA Status](https://app.fossa.com/api/projects/git%2Bgithub.com%2FWPSemantix%2Flangchain-timbr.svg?type=shield&issueType=security)](https://app.fossa.com/projects/git%2Bgithub.com%2FWPSemantix%2Flangchain-timbr?ref=badge_shield&issueType=security)
  [![FOSSA Status](https://app.fossa.com/api/projects/git%2Bgithub.com%2FWPSemantix%2Flangchain-timbr.svg?type=shield&issueType=license)](https://app.fossa.com/projects/git%2Bgithub.com%2FWPSemantix%2Flangchain-timbr?ref=badge_shield&issueType=license)

- [![Python 3.9](https://img.shields.io/badge/python-3.9-blue)](https://www.python.org/downloads/release/python-3921/)
+
  [![Python 3.10](https://img.shields.io/badge/python-3.10-blue.svg)](https://www.python.org/downloads/release/python-31017/)
  [![Python 3.11](https://img.shields.io/badge/python-3.11-blue.svg)](https://www.python.org/downloads/release/python-31112/)
  [![Python 3.12](https://img.shields.io/badge/python-3.12-blue.svg)](https://www.python.org/downloads/release/python-3129/)
@@ -17,7 +17,7 @@ Timbr LangChain LLM SDK is a Python SDK that extends LangChain and LangGraph wit
  ## Dependencies

  - Access to a timbr-server
- - Python 3.9.13 or newer
+ - Python 3.10 or newer

  ## Installation

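The new README floor lines up with the `Requires-Python: <3.13,>=3.10` metadata above; an illustrative interpreter guard (not part of the package):

```python
# Illustrative guard mirroring Requires-Python: >=3.10,<3.13 from the metadata.
import sys

if not ((3, 10) <= sys.version_info[:2] < (3, 13)):
    raise RuntimeError("langchain-timbr 2.1.0 supports Python 3.10-3.12 only")
```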
@@ -0,0 +1,173 @@
+ [build-system]
+ requires = ["hatchling>=1.25", "hatch-vcs>=0.4"]
+ build-backend = "hatchling.build"
+
+ [project]
+ name = "langchain-timbr"
+ dynamic = ["version"]
+ description = "LangChain & LangGraph extensions that parse LLM prompts into Timbr semantic SQL and execute them."
+ readme = "README.md"
+ requires-python = ">=3.10,<3.13"
+ license = {text = "MIT"}
+ authors = [{ name = "Timbr.ai", email = "contact@timbr.ai" }]
+ keywords = ["LLM", "LangChain", "LangGraph", "Timbr", "Semantic Layer", "Knowledge Graph", "SQL", "Agents"]
+ classifiers = [
+     "License :: OSI Approved :: MIT License",
+     "Programming Language :: Python :: 3",
+     "Programming Language :: Python :: 3 :: Only",
+     "Programming Language :: Python :: 3.10",
+     "Programming Language :: Python :: 3.11",
+     "Programming Language :: Python :: 3.12",
+     "Intended Audience :: Developers",
+     "Topic :: Scientific/Engineering :: Artificial Intelligence",
+ ]
+
+ dependencies = [
+     # Core dependencies that are the same across all Python versions
+     "anthropic==0.42.0",
+     "databricks-langchain==0.7.1",
+     "databricks-sdk==0.64.0",
+     "google-generativeai==0.8.4",
+     "pydantic==2.10.4",
+     "pytest==8.3.4",
+     "tiktoken==0.8.0",
+     "uvicorn==0.34.0",
+
+     # Python 3.11+ versions (same for 3.11 and 3.12+)
+     "azure-identity==1.25.0; python_version>='3.11'",
+     "cryptography==45.0.7; python_version>='3.11'",
+     "langchain==0.3.27; python_version>='3.11'",
+     "langchain-anthropic==0.3.5; python_version>='3.11'",
+     "langchain-community==0.3.30; python_version>='3.11'",
+     "langchain-core==0.3.78; python_version>='3.11'",
+     "langchain-google-genai==2.0.10; python_version>='3.11'",
+     "langchain-google-vertexai==2.1.2; python_version>='3.11'",
+     "langchain-openai==0.3.34; python_version>='3.11'",
+     "langchain-tests==0.3.22; python_version>='3.11'",
+     "langgraph==0.6.8; python_version>='3.11'",
+     "openai==2.1.0; python_version>='3.11'",
+     "opentelemetry-api==1.38.0; python_version=='3.10'",
+     "opentelemetry-sdk==1.38.0; python_version=='3.10'",
+     "pytimbr_api==2.0.0; python_version>='3.11'",
+     "snowflake==1.8.0; python_version>='3.11'",
+     "snowflake-snowpark-python==1.39.1; python_version>='3.11'",
+     "transformers==4.57.0; python_version>='3.11'",
+
+     # Python 3.10 specific versions
+     "azure-identity>=1.16.1; python_version=='3.10'",
+     "cryptography>=44.0.3; python_version=='3.10'",
+     "langchain>=0.3.25; python_version=='3.10'",
+     "langchain-anthropic>=0.3.1; python_version=='3.10'",
+     "langchain_community>=0.3.20; python_version=='3.10'",
+     "langchain-core>=0.3.58; python_version=='3.10'",
+     "langchain-google-genai>=2.0.9; python_version=='3.10'",
+     "langchain-google-vertexai>=2.0.28; python_version=='3.10'",
+     "langchain-openai>=0.3.16; python_version=='3.10'",
+     "langchain-tests>=0.3.20; python_version=='3.10'",
+     "langgraph>=0.3.20; python_version=='3.10'",
+     "openai>=1.77.0; python_version=='3.10'",
+     "opentelemetry-api==1.38.0; python_version=='3.10'",
+     "opentelemetry-sdk==1.38.0; python_version=='3.10'",
+     "pytimbr-api>=2.0.0; python_version=='3.10'",
+     "snowflake>=1.8.0; python_version=='3.10'",
+     "snowflake-snowpark-python>=1.39.1; python_version=='3.10'",
+     "transformers>=4.53; python_version=='3.10'",
+ ]
+
+ [project.optional-dependencies]
+ # LLM providers - using conditional versions
+ openai = [
+     "openai==2.1.0; python_version>='3.11'",
+     "langchain-openai==0.3.34; python_version>='3.11'",
+     "openai>=1.77.0; python_version=='3.10'",
+     "langchain-openai>=0.3.16; python_version=='3.10'"
+ ]
+ azure_openai = [
+     "azure-identity==1.25.0; python_version>='3.11'",
+     "openai==2.1.0; python_version>='3.11'",
+     "langchain-openai==0.3.34; python_version>='3.11'",
+     "azure-identity>=1.16.1; python_version=='3.10'",
+     "openai>=1.77.0; python_version=='3.10'",
+     "langchain-openai>=0.3.16; python_version=='3.10'"
+ ]
+ anthropic = [
+     "anthropic==0.42.0",
+     "langchain-anthropic==0.3.5; python_version>='3.11'",
+     "langchain-anthropic>=0.3.1; python_version=='3.10'"
+ ]
+ google = [
+     "langchain-google-genai==2.0.10; python_version>='3.11'",
+     "google-generativeai==0.8.4",
+     "langchain-google-genai>=2.0.9; python_version=='3.10'"
+ ]
+ vertex_ai = [
+     "langchain-google-vertexai==2.1.2; python_version>='3.11'",
+     "google-generativeai==0.8.4",
+     "langchain-google-vertexai>=2.0.28; python_version=='3.10'"
+ ]
+ snowflake = [
+     "opentelemetry-api==1.38.0; python_version<'3.12'",
+     "opentelemetry-sdk==1.38.0; python_version<'3.12'",
+     "snowflake==1.8.0; python_version>='3.11'",
+     "snowflake-snowpark-python==1.39.1; python_version>='3.11'",
+     "snowflake>=1.8.0; python_version=='3.10'",
+     "snowflake-snowpark-python>=1.39.1; python_version=='3.10'"
+ ]
+ databricks = [
+     "databricks-langchain==0.7.1",
+     "databricks-sdk==0.64.0"
+ ]
+
+ # Development and testing
+ dev = [
+     "pytest==8.3.4",
+     "langchain-tests==0.3.22; python_version>='3.11'",
+     "uvicorn==0.34.0",
+     "langchain-tests>=0.3.20; python_version=='3.10'",
+ ]
+
+ # All optional dependencies
+ all = [
+     "anthropic==0.42.0",
+     "google-generativeai==0.8.4",
+     "pytest==8.3.4",
+     "uvicorn==0.34.0",
+     "databricks-langchain==0.7.1",
+     "databricks-sdk==0.64.0",
+     # Python 3.11+ versions
+     "azure-identity==1.25.0; python_version>='3.11'",
+     "langchain-anthropic==0.3.5; python_version>='3.11'",
+     "openai==2.1.0; python_version>='3.11'",
+     "langchain-openai==0.3.34; python_version>='3.11'",
+     "langchain-google-genai==2.0.10; python_version>='3.11'",
+     "langchain-google-vertexai==2.1.2; python_version>='3.11'",
+     "snowflake==1.8.0; python_version>='3.11'",
+     "snowflake-snowpark-python==1.39.1; python_version>='3.11'",
+     "langchain-tests==0.3.22; python_version>='3.11'",
+     # Python 3.10 versions
+     "azure-identity>=1.16.1; python_version=='3.10'",
+     "langchain-anthropic>=0.3.1; python_version=='3.10'",
+     "openai>=1.77.0; python_version=='3.10'",
+     "langchain-openai>=0.3.16; python_version=='3.10'",
+     "langchain-google-genai>=2.0.9; python_version=='3.10'",
+     "langchain-google-vertexai>=2.0.28; python_version=='3.10'",
+     "snowflake>=1.8.0; python_version=='3.10'",
+     "snowflake-snowpark-python>=1.39.1; python_version=='3.10'",
+     "langchain-tests>=0.3.20; python_version=='3.10'",
+ ]
+
+ [project.urls]
+ Homepage = "https://github.com/WPSemantix/langchain-timbr"
+ Documentation = "https://docs.timbr.ai/doc/docs/integration/langchain-sdk/"
+ Source = "https://github.com/WPSemantix/langchain-timbr"
+ Issues = "https://github.com/WPSemantix/langchain-timbr/issues"
+
+ [tool.hatch.version]
+ source = "vcs" # version comes from git
+
+ [tool.hatch.build.hooks.vcs]
+ version-file = "src/langchain_timbr/_version.py" # generate a version file
+
+ [tool.hatch.build.targets.wheel]
+ packages = ["src/langchain_timbr"]
+
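The split pins above rely on PEP 508 environment markers, which pip evaluates against the running interpreter, so on Python 3.10 every `python_version >= '3.11'` pin simply drops out of the install. A quick check using the `packaging` library (the same one pip vendors for marker evaluation):

```python
# Evaluate the same PEP 508 markers the pins above use. Each marker is checked
# against the running interpreter, mirroring what pip does at resolve time.
from packaging.markers import Marker

for spec in ("python_version >= '3.11'", "python_version == '3.10'", "python_version < '3.12'"):
    print(f"{spec!r:32} -> {Marker(spec).evaluate()}")
```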
@@ -0,0 +1,24 @@
+ anthropic==0.42.0
+ azure-identity==1.25.0
+ cryptography==45.0.7
+ databricks-langchain==0.7.1
+ databricks-sdk==0.64.0
+ google-generativeai==0.8.4
+ langchain==0.3.27
+ langchain-anthropic==0.3.5
+ langchain-community==0.3.30
+ langchain-core==0.3.78
+ langchain-google-genai==2.0.10
+ langchain-google-vertexai==2.1.2
+ langchain-openai==0.3.34
+ langchain-tests==0.3.22
+ langgraph==0.6.8
+ openai==2.1.0
+ pydantic==2.10.4
+ pytest==8.3.4
+ pytimbr_api==2.0.0
+ snowflake==1.8.0
+ snowflake-snowpark-python==1.39.1
+ tiktoken==0.8.0
+ transformers==4.57.0
+ uvicorn==0.34.0
@@ -1,8 +1,7 @@
  anthropic==0.42.0
  azure-identity>=1.16.1
  cryptography>=44.0.3
- databricks-langchain==0.3.0; python_version < '3.10'
- databricks-langchain==0.7.1; python_version >= '3.10'
+ databricks-langchain==0.7.1
  databricks-sdk==0.64.0
  google-generativeai==0.8.4
  langchain>=0.3.25
@@ -15,12 +14,14 @@ langchain-openai>=0.3.16
  langchain-tests>=0.3.20
  langgraph>=0.3.20
  openai>=1.77.0
- pyarrow>=19.0.1,<20.0.0
+ opentelemetry-api==1.38.0 # Required when using snowflake with Python under 3.12
+ opentelemetry-sdk==1.38.0 # Required when using snowflake with Python under 3.12
  pydantic==2.10.4
  pytest==8.3.4
  pytimbr-api>=2.0.0
- snowflake>=0.8.0
- snowflake-snowpark-python>=1.6.0
+ snowflake>=1.8.0
+ snowflake-snowpark-python>=1.23.0
+ snowflake-connector-python[pandas]>=3.13.1,<4
  tiktoken==0.8.0
  transformers>=4.53
  uvicorn==0.34.0
@@ -0,0 +1,26 @@
+ anthropic==0.42.0
+ azure-identity==1.25.0
+ cryptography==45.0.7
+ databricks-langchain==0.7.1
+ databricks-sdk==0.64.0
+ google-generativeai==0.8.4
+ langchain==0.3.27
+ langchain-anthropic==0.3.5
+ langchain-community==0.3.30
+ langchain-core==0.3.78
+ langchain-google-genai==2.0.10
+ langchain-google-vertexai==2.1.2
+ langchain-openai==0.3.34
+ langchain-tests==0.3.22
+ langgraph==0.6.8
+ openai==2.1.0
+ opentelemetry-api==1.38.0 # Required when using snowflake with Python under 3.12
+ opentelemetry-sdk==1.38.0 # Required when using snowflake with Python under 3.12
+ pydantic==2.10.4
+ pytest==8.3.4
+ pytimbr_api==2.0.0
+ snowflake==1.8.0
+ snowflake-snowpark-python==1.39.1
+ tiktoken==0.8.0
+ transformers==4.57.0
+ uvicorn==0.34.0
@@ -28,7 +28,7 @@ version_tuple: VERSION_TUPLE
  commit_id: COMMIT_ID
  __commit_id__: COMMIT_ID

- __version__ = version = '2.0.3'
- __version_tuple__ = version_tuple = (2, 0, 3)
+ __version__ = version = '2.1.0'
+ __version_tuple__ = version_tuple = (2, 1, 0)

  __commit_id__ = commit_id = None
@@ -138,29 +138,30 @@ def _prompt_to_string(prompt: Any) -> str:
  def _calculate_token_count(llm: LLM, prompt: str) -> int:
      """
      Calculate the token count for a given prompt text using the specified LLM.
-     Falls back to tiktoken if the LLM doesn't support token counting.
+     Falls back to basic if the LLM doesn't support token counting.
      """
+     import tiktoken
      token_count = 0
+
+     encoding = None
      try:
-         if hasattr(llm, "get_num_tokens_from_messages"):
-             token_count = llm.get_num_tokens_from_messages(prompt)
+         if hasattr(llm, 'client') and hasattr(llm.client, 'model_name'):
+             encoding = tiktoken.encoding_for_model(llm.client.model_name)
      except Exception as e:
-         #print(f"Error with primary token counting: {e}")
+         print(f"Error with primary token counting: {e}")
          pass

-     # Use tiktoken as fallback if token_count is still 0
-     if token_count == 0:
-         try:
-             import tiktoken
+     try:
+         if encoding is None:
              encoding = tiktoken.get_encoding("cl100k_base")
-             if isinstance(prompt, str):
-                 token_count = len(encoding.encode(prompt))
-             else:
-                 prompt_text = _prompt_to_string(prompt)
-                 token_count = len(encoding.encode(prompt_text))
-         except Exception as e2:
-             #print(f"Error calculating token count with fallback method: {e2}")
-             pass
+         if isinstance(prompt, str):
+             token_count = len(encoding.encode(prompt))
+         else:
+             prompt_text = _prompt_to_string(prompt)
+             token_count = len(encoding.encode(prompt_text))
+     except Exception as e2:
+         #print(f"Error calculating token count with fallback method: {e2}")
+         pass

      return token_count

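Distilled from the hunk above: 2.1.0 first tries a model-specific tiktoken encoding via the LLM client's `model_name` and only then falls back to `cl100k_base`. A standalone sketch, simplified to string prompts:

```python
# Simplified restatement of the new fallback order in _calculate_token_count:
# model-specific tiktoken encoding first, generic cl100k_base second.
import tiktoken

def count_tokens(llm, prompt: str) -> int:
    encoding = None
    try:
        # New in 2.1.0: use the model name exposed by the LLM's client, if any.
        if hasattr(llm, "client") and hasattr(llm.client, "model_name"):
            encoding = tiktoken.encoding_for_model(llm.client.model_name)
    except Exception:
        pass  # unrecognized model name: fall through to the generic encoding
    if encoding is None:
        encoding = tiktoken.get_encoding("cl100k_base")
    return len(encoding.encode(prompt))
```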
@@ -179,6 +180,10 @@ def _get_response_text(response: Any) -> str:
      else:
          raise ValueError("Unexpected response format from LLM.")

+     if "QUESTION VALIDATION ERROR:" in response_text:
+         err = response_text.split("QUESTION VALIDATION ERROR:", 1)[1].strip()
+         raise ValueError(err)
+
      return response_text

  def _extract_usage_metadata(response: Any) -> dict:
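The practical effect of the new branch: a `QUESTION VALIDATION ERROR:` marker in the LLM reply now reaches callers as a `ValueError`. A self-contained sketch of the resulting calling pattern, with a stub standing in for a real Timbr chain:

```python
# Illustrative only: shows how callers can now catch the validation error that
# _get_response_text raises. _StubChain stands in for a real Timbr chain.
class _StubChain:
    def invoke(self, inputs: dict) -> dict:
        # Mimics the new behavior when the LLM flags the question as invalid.
        raise ValueError("the question references an unknown concept")

chain = _StubChain()
try:
    result = chain.invoke({"prompt": "Count all orders"})
except ValueError as err:
    print(f"Question rejected by validation: {err}")
```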
@@ -220,7 +220,7 @@ class TestChainExamples:
              chain = ExecuteTimbrQueryChain(**config_dict)

              # Test basic functionality
-             result = chain.invoke({"prompt": f"test configuration {i}"})
+             result = chain.invoke({"prompt": "Count all orders"})
              assert isinstance(result, dict)

          except Exception as e: