langchain_timbr-1.5.0.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (50)
  1. langchain_timbr-1.5.0/.github/dependabot.yml +16 -0
  2. langchain_timbr-1.5.0/.github/pull_request_template.md +30 -0
  3. langchain_timbr-1.5.0/.github/workflows/_codespell.yml +39 -0
  4. langchain_timbr-1.5.0/.github/workflows/_lint.yml +102 -0
  5. langchain_timbr-1.5.0/.github/workflows/fossa.yml +15 -0
  6. langchain_timbr-1.5.0/.github/workflows/install-dependencies-and-run-tests.yml +59 -0
  7. langchain_timbr-1.5.0/.github/workflows/publish.yml +18 -0
  8. langchain_timbr-1.5.0/.gitignore +20 -0
  9. langchain_timbr-1.5.0/LICENSE +21 -0
  10. langchain_timbr-1.5.0/PKG-INFO +103 -0
  11. langchain_timbr-1.5.0/README.md +40 -0
  12. langchain_timbr-1.5.0/pyproject.toml +76 -0
  13. langchain_timbr-1.5.0/pytest.ini +15 -0
  14. langchain_timbr-1.5.0/requirements.txt +21 -0
  15. langchain_timbr-1.5.0/src/langchain_timbr/__init__.py +17 -0
  16. langchain_timbr-1.5.0/src/langchain_timbr/config.py +21 -0
  17. langchain_timbr-1.5.0/src/langchain_timbr/langchain/__init__.py +16 -0
  18. langchain_timbr-1.5.0/src/langchain_timbr/langchain/execute_timbr_query_chain.py +307 -0
  19. langchain_timbr-1.5.0/src/langchain_timbr/langchain/generate_answer_chain.py +99 -0
  20. langchain_timbr-1.5.0/src/langchain_timbr/langchain/generate_timbr_sql_chain.py +176 -0
  21. langchain_timbr-1.5.0/src/langchain_timbr/langchain/identify_concept_chain.py +138 -0
  22. langchain_timbr-1.5.0/src/langchain_timbr/langchain/timbr_sql_agent.py +418 -0
  23. langchain_timbr-1.5.0/src/langchain_timbr/langchain/validate_timbr_sql_chain.py +187 -0
  24. langchain_timbr-1.5.0/src/langchain_timbr/langgraph/__init__.py +13 -0
  25. langchain_timbr-1.5.0/src/langchain_timbr/langgraph/execute_timbr_query_node.py +108 -0
  26. langchain_timbr-1.5.0/src/langchain_timbr/langgraph/generate_response_node.py +59 -0
  27. langchain_timbr-1.5.0/src/langchain_timbr/langgraph/generate_timbr_sql_node.py +98 -0
  28. langchain_timbr-1.5.0/src/langchain_timbr/langgraph/identify_concept_node.py +78 -0
  29. langchain_timbr-1.5.0/src/langchain_timbr/langgraph/validate_timbr_query_node.py +100 -0
  30. langchain_timbr-1.5.0/src/langchain_timbr/llm_wrapper/llm_wrapper.py +189 -0
  31. langchain_timbr-1.5.0/src/langchain_timbr/llm_wrapper/timbr_llm_wrapper.py +41 -0
  32. langchain_timbr-1.5.0/src/langchain_timbr/timbr_llm_connector.py +398 -0
  33. langchain_timbr-1.5.0/src/langchain_timbr/utils/general.py +70 -0
  34. langchain_timbr-1.5.0/src/langchain_timbr/utils/prompt_service.py +330 -0
  35. langchain_timbr-1.5.0/src/langchain_timbr/utils/temperature_supported_models.json +62 -0
  36. langchain_timbr-1.5.0/src/langchain_timbr/utils/timbr_llm_utils.py +575 -0
  37. langchain_timbr-1.5.0/src/langchain_timbr/utils/timbr_utils.py +475 -0
  38. langchain_timbr-1.5.0/tests/README.md +12 -0
  39. langchain_timbr-1.5.0/tests/conftest.py +54 -0
  40. langchain_timbr-1.5.0/tests/integration/test_agent_integration.py +152 -0
  41. langchain_timbr-1.5.0/tests/integration/test_azure_openai_model.py +101 -0
  42. langchain_timbr-1.5.0/tests/integration/test_chain_pipeline.py +39 -0
  43. langchain_timbr-1.5.0/tests/integration/test_jwt_token.py +57 -0
  44. langchain_timbr-1.5.0/tests/integration/test_langchain_chains.py +404 -0
  45. langchain_timbr-1.5.0/tests/integration/test_langgraph_nodes.py +113 -0
  46. langchain_timbr-1.5.0/tests/integration/test_timeout_functionality.py +90 -0
  47. langchain_timbr-1.5.0/tests/standard/conftest.py +24 -0
  48. langchain_timbr-1.5.0/tests/standard/test_chain_documentation.py +269 -0
  49. langchain_timbr-1.5.0/tests/standard/test_standard_chain_requirements.py +335 -0
  50. langchain_timbr-1.5.0/tests/standard/test_unit_tests.py +176 -0
langchain_timbr-1.5.0/.github/dependabot.yml
@@ -0,0 +1,16 @@
+ # To get started with Dependabot version updates, you'll need to specify which
+ # package ecosystems to update and where the package manifests are located.
+ # Please see the documentation for all configuration options:
+ # https://docs.github.com/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file
+
+ version: 2
+ updates:
+   - package-ecosystem: "pip" # See documentation for possible values
+     directory: "/" # Location of package manifests
+     schedule:
+       interval: "monthly"
+       day: "sunday"
+     target-branch: "dependencies-updates"
+     labels:
+       - "dependencies"
+       - "automated"
langchain_timbr-1.5.0/.github/pull_request_template.md
@@ -0,0 +1,30 @@
+ ### Description
+ <!--- Copy a summary or requirements of the bug/feature section of the JIRA ticket -->
+
+ ### Type of Change
+ <!--- Check any relevant boxes with "x" -->
+ - [ ] New feature | task (non-breaking change which adds functionality)
+ - [ ] Bug fix (non-breaking change which fixes an issue)
+ - [ ] Breaking change
+
+ ### TESTS
+ <!--- Specify number of tests added or changed -->
+ Number of tests added/changed:
+
+ ### ADDITIONAL INFORMATION
+ <!--- Check any relevant boxes with "x" -->
+ <!--- HINT: Enter the ticket number as TP-XXXX format -->
+ - [ ] JIRA Ticket:
+ - [ ] Has associated issue:
+ - [ ] Removes existing feature or API
+ - [ ] Breaking change (fix or feature that would cause existing functionality to not work as expected)
+ - [ ] This change requires a documentation update
+
+ ### Checklist:
+ - [ ] My code follows the style guidelines of this project
+ - [ ] I have performed a self-review of my own code
+ - [ ] I have commented my code, particularly in hard-to-understand areas
+ - [ ] I have made corresponding changes to the documentation
+ - [ ] My changes generate no new warnings
+ - [ ] I have added tests that prove my fix is effective or that my feature works
+ - [ ] New and existing unit tests pass locally with my changes
langchain_timbr-1.5.0/.github/workflows/_codespell.yml
@@ -0,0 +1,39 @@
+ ---
+ name: make spell_check
+
+ on:
+   workflow_call:
+     inputs:
+       working-directory:
+         required: true
+         type: string
+         description: "From which folder this pipeline executes"
+
+ permissions:
+   contents: read
+
+ jobs:
+   codespell:
+     name: (Check for spelling errors)
+     runs-on: ubuntu-latest
+
+     steps:
+       - name: Checkout
+         uses: actions/checkout@v4
+
+       - name: Install Dependencies
+         run: |
+           pip install toml
+
+       - name: Extract Ignore Words List
+         working-directory: ${{ inputs.working-directory }}
+         run: |
+           # Use a Python script to extract the ignore words list from pyproject.toml
+           python ../../.github/workflows/extract_ignored_words_list.py
+         id: extract_ignore_words
+
+       - name: Codespell
+         uses: codespell-project/actions-codespell@v2
+         with:
+           skip: guide_imports.json
+           ignore_words_list: ${{ steps.extract_ignore_words.outputs.ignore_words_list }}
langchain_timbr-1.5.0/.github/workflows/_lint.yml
@@ -0,0 +1,102 @@
+ name: lint
+
+ on:
+   workflow_call:
+     inputs:
+       working-directory:
+         required: true
+         type: string
+         description: "From which folder this pipeline executes"
+
+ env:
+   POETRY_VERSION: "1.7.1"
+   WORKDIR: ${{ inputs.working-directory == '' && '.' || inputs.working-directory }}
+
+   # This env var allows us to get inline annotations when ruff has complaints.
+   RUFF_OUTPUT_FORMAT: github
+
+ jobs:
+   build:
+     name: "make lint #${{ matrix.python-version }}"
+     runs-on: ubuntu-latest
+     strategy:
+       matrix:
+         # Only lint on the min and max supported Python versions.
+         # It's extremely unlikely that there's a lint issue on any version in between
+         # that doesn't show up on the min or max versions.
+         #
+         # GitHub rate-limits how many jobs can be running at any one time.
+         # Starting new jobs is also relatively slow,
+         # so linting on fewer versions makes CI faster.
+         python-version:
+           - "3.9"
+           - "3.12"
+     steps:
+       - uses: actions/checkout@v4
+
+       - name: Set up Python ${{ matrix.python-version }} + Poetry ${{ env.POETRY_VERSION }}
+         uses: "./.github/actions/poetry_setup"
+         with:
+           python-version: ${{ matrix.python-version }}
+           poetry-version: ${{ env.POETRY_VERSION }}
+           working-directory: ${{ inputs.working-directory }}
+           cache-key: lint-with-extras
+
+       - name: Check Poetry File
+         shell: bash
+         working-directory: ${{ inputs.working-directory }}
+         run: |
+           poetry check
+
+       - name: Check lock file
+         shell: bash
+         working-directory: ${{ inputs.working-directory }}
+         run: |
+           poetry lock --check
+
+       - name: Install dependencies
+         # Also installs dev/lint/test/typing dependencies, to ensure we have
+         # type hints for as many of our libraries as possible.
+         # This helps catch errors that require dependencies to be spotted, for example:
+         # https://github.com/langchain-ai/langchain/pull/10249/files#diff-935185cd488d015f026dcd9e19616ff62863e8cde8c0bee70318d3ccbca98341
+         #
+         # If you change this configuration, make sure to change the `cache-key`
+         # in the `poetry_setup` action above to stop using the old cache.
+         # It doesn't matter how you change it, any change will cause a cache-bust.
+         working-directory: ${{ inputs.working-directory }}
+         run: |
+           poetry install --with lint,typing,mmr
+
+       - name: Get .mypy_cache to speed up mypy
+         uses: actions/cache@v4
+         env:
+           SEGMENT_DOWNLOAD_TIMEOUT_MIN: "2"
+         with:
+           path: |
+             ${{ env.WORKDIR }}/.mypy_cache
+           key: mypy-lint-${{ runner.os }}-${{ runner.arch }}-py${{ matrix.python-version }}-${{ inputs.working-directory }}-${{ hashFiles(format('{0}/poetry.lock', inputs.working-directory)) }}
+
+
+       - name: Analysing the code with our lint
+         working-directory: ${{ inputs.working-directory }}
+         run: |
+           make lint_package
+
+       - name: Install unit+integration test dependencies
+         working-directory: ${{ inputs.working-directory }}
+         run: |
+           poetry install --with test,test_integration
+
+       - name: Get .mypy_cache_test to speed up mypy
+         uses: actions/cache@v4
+         env:
+           SEGMENT_DOWNLOAD_TIMEOUT_MIN: "2"
+         with:
+           path: |
+             ${{ env.WORKDIR }}/.mypy_cache_test
+           key: mypy-test-${{ runner.os }}-${{ runner.arch }}-py${{ matrix.python-version }}-${{ inputs.working-directory }}-${{ hashFiles(format('{0}/poetry.lock', inputs.working-directory)) }}
+
+       - name: Analysing the code with our lint
+         working-directory: ${{ inputs.working-directory }}
+         run: |
+           make lint_tests
langchain_timbr-1.5.0/.github/workflows/fossa.yml
@@ -0,0 +1,15 @@
+ # .github/workflows/fossa.yml
+ name: fossa
+ on:
+   push: { branches: ["main"] }
+   pull_request: {}
+ jobs:
+   fossa:
+     runs-on: ubuntu-latest
+     steps:
+       - uses: actions/checkout@v4
+       - name: Run FOSSA Scan
+         uses: fossas/fossa-action@v1
+         with:
+           api-key: ${{ secrets.FOSSA_API_KEY }}
+           run-tests: true
langchain_timbr-1.5.0/.github/workflows/install-dependencies-and-run-tests.yml
@@ -0,0 +1,59 @@
+ name: Install Dependencies and Run Tests
+
+ on:
+   push:
+     branches: [ main, master ]
+   pull_request:
+     branches: [ main, master ]
+
+ jobs:
+   run-tests:
+     runs-on: ubuntu-latest
+     # strategy:
+     #   matrix:
+     #     python-version: [3.9, '3.10']
+
+     steps:
+       - name: Checkout repository
+         uses: actions/checkout@v3
+
+       # - name: Set up Python ${{ matrix.python-version }}
+       - name: Set up Python
+         uses: actions/setup-python@v4
+         with:
+           python-version: '3.9'
+           # python-version: ${{ matrix.python-version }}
+
+       - name: Install dependencies
+         run: |
+           python -m pip install --upgrade pip
+           if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
+           if [ -f requirements-dev.txt ]; then pip install -r requirements-dev.txt; fi
+           pip install pytest
+
+       - name: Run tests
+         env:
+           TIMBR_URL: ${{ secrets.TIMBR_URL }}
+           TIMBR_TOKEN: ${{ secrets.TIMBR_TOKEN }}
+           LLM_TYPE: ${{ secrets.LLM_TYPE }}
+           LLM_MODEL: ${{ secrets.LLM_MODEL }}
+           LLM_API_KEY: ${{ secrets.LLM_API_KEY }}
+           JWT_TIMBR_URL: ${{ secrets.JWT_TIMBR_URL }}
+           JWT_TIMBR_ONTOLOGY: ${{ secrets.JWT_TIMBR_ONTOLOGY }}
+           JWT_TENANT_ID: ${{ secrets.JWT_TENANT_ID }}
+           JWT_CLIENT_ID: ${{ secrets.JWT_CLIENT_ID }}
+           JWT_USERNAME: ${{ secrets.JWT_USERNAME }}
+           JWT_PASSWORD: ${{ secrets.JWT_PASSWORD }}
+           JWT_SCOPE: ${{ secrets.JWT_SCOPE }}
+           JWT_SECRET: ${{ secrets.JWT_SECRET }}
+         run: |
+           python -m pytest -v --maxfail=1 --disable-warnings -q
+
+       # - name: Upload test results
+       #   uses: actions/upload-artifact@v4
+       #   if: always()
+       #   with:
+       #     name: test-results-${{ matrix.python-version }}
+       #     path: |
+       #       .pytest_cache
+       #       test-reports/
langchain_timbr-1.5.0/.github/workflows/publish.yml
@@ -0,0 +1,18 @@
+ name: publish
+ on:
+   push:
+     tags: ["v*.*.*"]
+
+ jobs:
+   build-publish:
+     runs-on: ubuntu-latest
+     steps:
+       - uses: actions/checkout@v4
+       - uses: actions/setup-python@v5
+         with: { python-version: "3.12" }
+       - run: python -m pip install --upgrade build twine
+       - run: python -m build
+       - env:
+           TWINE_USERNAME: __token__
+           TWINE_PASSWORD: ${{ secrets.PYPI_API_TOKEN }}
+         run: twine upload dist/*
langchain_timbr-1.5.0/.gitignore
@@ -0,0 +1,20 @@
+ # IDE
+ .vscode
+
+ # build artifacts
+ dist/
+ build/
+ *.egg-info/
+ output/
+
+ # Python cache
+ __pycache__/
+ *.pyc
+ **/*.pyc
+ *.pyo
+
+ # virtualenvs
+ .env
+ env
+ venv
+ .DS_Store
langchain_timbr-1.5.0/LICENSE
@@ -0,0 +1,21 @@
+ MIT License
+
+ Copyright (c) 2025 Timbr.ai
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in all
+ copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ SOFTWARE.
langchain_timbr-1.5.0/PKG-INFO
@@ -0,0 +1,103 @@
+ Metadata-Version: 2.4
+ Name: langchain-timbr
+ Version: 1.5.0
+ Summary: LangChain & LangGraph extensions that parse LLM prompts into Timbr semantic SQL and execute them.
+ Project-URL: Homepage, https://github.com/WPSemantix/langchain-timbr
+ Project-URL: Documentation, https://docs.timbr.ai/doc/docs/integration/langchain-sdk/
+ Project-URL: Source, https://github.com/WPSemantix/langchain-timbr
+ Project-URL: Issues, https://github.com/WPSemantix/langchain-timbr/issues
+ Author-email: Bar Cohen <barco@timbr.ai>
+ License: MIT
+ License-File: LICENSE
+ Keywords: Agents,Knowledge Graph,LLM,LangChain,LangGraph,SQL,Semantic Layer,Timbr
+ Classifier: Intended Audience :: Developers
+ Classifier: License :: OSI Approved :: MIT License
+ Classifier: Programming Language :: Python :: 3
+ Classifier: Programming Language :: Python :: 3 :: Only
+ Classifier: Programming Language :: Python :: 3.9
+ Classifier: Programming Language :: Python :: 3.10
+ Classifier: Programming Language :: Python :: 3.11
+ Classifier: Programming Language :: Python :: 3.12
+ Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
+ Requires-Python: <3.13,>=3.9
+ Requires-Dist: cryptography>=44.0.3
+ Requires-Dist: langchain-community>=0.3.20
+ Requires-Dist: langchain-core>=0.3.58
+ Requires-Dist: langchain>=0.3.25
+ Requires-Dist: langgraph>=0.3.20
+ Requires-Dist: pydantic==2.10.4
+ Requires-Dist: pytimbr-api>=2.0.0
+ Requires-Dist: tiktoken==0.8.0
+ Requires-Dist: transformers>=4.51.3
+ Provides-Extra: all
+ Requires-Dist: anthropic==0.42.0; extra == 'all'
+ Requires-Dist: google-generativeai==0.8.4; extra == 'all'
+ Requires-Dist: langchain-anthropic>=0.3.1; extra == 'all'
+ Requires-Dist: langchain-google-genai>=2.0.9; extra == 'all'
+ Requires-Dist: langchain-openai>=0.3.16; extra == 'all'
+ Requires-Dist: langchain-tests>=0.3.20; extra == 'all'
+ Requires-Dist: openai==1.77.0; extra == 'all'
+ Requires-Dist: pyarrow<19.0.0; extra == 'all'
+ Requires-Dist: pytest==8.3.4; extra == 'all'
+ Requires-Dist: snowflake-snowpark-python>=1.6.0; extra == 'all'
+ Requires-Dist: snowflake>=0.8.0; extra == 'all'
+ Requires-Dist: uvicorn==0.34.0; extra == 'all'
+ Provides-Extra: anthropic
+ Requires-Dist: anthropic==0.42.0; extra == 'anthropic'
+ Requires-Dist: langchain-anthropic>=0.3.1; extra == 'anthropic'
+ Provides-Extra: dev
+ Requires-Dist: langchain-tests>=0.3.20; extra == 'dev'
+ Requires-Dist: pyarrow<19.0.0; extra == 'dev'
+ Requires-Dist: pytest==8.3.4; extra == 'dev'
+ Requires-Dist: uvicorn==0.34.0; extra == 'dev'
+ Provides-Extra: google
+ Requires-Dist: google-generativeai==0.8.4; extra == 'google'
+ Requires-Dist: langchain-google-genai>=2.0.9; extra == 'google'
+ Provides-Extra: openai
+ Requires-Dist: langchain-openai>=0.3.16; extra == 'openai'
+ Requires-Dist: openai==1.77.0; extra == 'openai'
+ Provides-Extra: snowflake
+ Requires-Dist: snowflake-snowpark-python>=1.6.0; extra == 'snowflake'
+ Requires-Dist: snowflake>=0.8.0; extra == 'snowflake'
+ Description-Content-Type: text/markdown
+
+ ![Timbr logo description](https://timbr.ai/wp-content/uploads/2025/01/logotimbrai230125.png)
+
+ [![FOSSA Status](https://app.fossa.com/api/projects/git%2Bgithub.com%2FWPSemantix%2Flangchain-timbr.svg?type=shield&issueType=security)](https://app.fossa.com/projects/git%2Bgithub.com%2FWPSemantix%2Flangchain-timbr?ref=badge_shield&issueType=security)
+ [![FOSSA Status](https://app.fossa.com/api/projects/git%2Bgithub.com%2FWPSemantix%2Flangchain-timbr.svg?type=shield&issueType=license)](https://app.fossa.com/projects/git%2Bgithub.com%2FWPSemantix%2Flangchain-timbr?ref=badge_shield&issueType=license)
+
+ [![Python 3.9](https://img.shields.io/badge/python-3.9-blue)](https://www.python.org/downloads/release/python-3921/)
+ [![Python 3.10](https://img.shields.io/badge/python-3.10-blue.svg)](https://www.python.org/downloads/release/python-31017/)
+ [![Python 3.11](https://img.shields.io/badge/python-3.11-blue.svg)](https://www.python.org/downloads/release/python-31112/)
+ [![Python 3.12](https://img.shields.io/badge/python-3.12-blue.svg)](https://www.python.org/downloads/release/python-3129/)
+
+ # Timbr LangChain LLM SDK
+
+ The Timbr LangChain LLM SDK is a Python package that extends LangChain and LangGraph with custom agents, chains, and nodes for seamless integration with the Timbr semantic layer. It converts natural language prompts into optimized semantic SQL queries and executes them directly against your data.
+
+ ## Dependencies
+ - Access to a timbr-server
+ - Python 3.9.13 or newer
+
+ ## Installation
+
+ ### Using pip
+ ```bash
+ python -m pip install langchain-timbr
+ ```
+
+ ### Using pip from GitHub
+ ```bash
+ pip install git+https://github.com/WPSemantix/langchain-timbr
+ ```
+
+ ## Documentation
+
+ For comprehensive documentation and usage examples, please visit:
+
+ - [Timbr LangChain Documentation](https://docs.timbr.ai/doc/docs/integration/langchain-sdk)
+ - [Timbr LangGraph Documentation](https://docs.timbr.ai/doc/docs/integration/langgraph-sdk)
+
+ ## Configuration
+
+ The SDK requires several environment variables to be configured. See [`src/langchain_timbr/config.py`](src/langchain_timbr/config.py) for all available configuration options.
langchain_timbr-1.5.0/README.md
@@ -0,0 +1,40 @@
+ ![Timbr logo description](https://timbr.ai/wp-content/uploads/2025/01/logotimbrai230125.png)
+
+ [![FOSSA Status](https://app.fossa.com/api/projects/git%2Bgithub.com%2FWPSemantix%2Flangchain-timbr.svg?type=shield&issueType=security)](https://app.fossa.com/projects/git%2Bgithub.com%2FWPSemantix%2Flangchain-timbr?ref=badge_shield&issueType=security)
+ [![FOSSA Status](https://app.fossa.com/api/projects/git%2Bgithub.com%2FWPSemantix%2Flangchain-timbr.svg?type=shield&issueType=license)](https://app.fossa.com/projects/git%2Bgithub.com%2FWPSemantix%2Flangchain-timbr?ref=badge_shield&issueType=license)
+
+ [![Python 3.9](https://img.shields.io/badge/python-3.9-blue)](https://www.python.org/downloads/release/python-3921/)
+ [![Python 3.10](https://img.shields.io/badge/python-3.10-blue.svg)](https://www.python.org/downloads/release/python-31017/)
+ [![Python 3.11](https://img.shields.io/badge/python-3.11-blue.svg)](https://www.python.org/downloads/release/python-31112/)
+ [![Python 3.12](https://img.shields.io/badge/python-3.12-blue.svg)](https://www.python.org/downloads/release/python-3129/)
+
+ # Timbr LangChain LLM SDK
+
+ The Timbr LangChain LLM SDK is a Python package that extends LangChain and LangGraph with custom agents, chains, and nodes for seamless integration with the Timbr semantic layer. It converts natural language prompts into optimized semantic SQL queries and executes them directly against your data.
+
+ ## Dependencies
+ - Access to a timbr-server
+ - Python 3.9.13 or newer
+
+ ## Installation
+
+ ### Using pip
+ ```bash
+ python -m pip install langchain-timbr
+ ```
+
+ ### Using pip from GitHub
+ ```bash
+ pip install git+https://github.com/WPSemantix/langchain-timbr
+ ```
+
+ ## Documentation
+
+ For comprehensive documentation and usage examples, please visit:
+
+ - [Timbr LangChain Documentation](https://docs.timbr.ai/doc/docs/integration/langchain-sdk)
+ - [Timbr LangGraph Documentation](https://docs.timbr.ai/doc/docs/integration/langgraph-sdk)
+
+ ## Configuration
+
+ The SDK requires several environment variables to be configured. See [`src/langchain_timbr/config.py`](src/langchain_timbr/config.py) for all available configuration options.
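To make the installation and configuration notes above concrete, here is a minimal, hypothetical sketch of wiring the exported pieces together (assuming the package was installed with an LLM provider extra such as `openai`). The class names come from the package's `__init__` modules shown later in this diff; the constructor parameters and the `prompt` input key are assumptions inferred from `config.py` and the linked documentation, not a verified API.

```python
# Hypothetical quick-start sketch -- parameter and input-key names are assumptions.
import os

from langchain_timbr import ExecuteTimbrQueryChain, LlmWrapper, LlmTypes

# Wrap the LLM provider (the enum member and keyword names are assumed).
llm = LlmWrapper(
    llm_type=LlmTypes.OpenAI,
    api_key=os.environ["LLM_API_KEY"],
    model="gpt-4o",  # placeholder model name
)

# Chain that generates Timbr semantic SQL from a prompt and executes it.
chain = ExecuteTimbrQueryChain(
    llm=llm,
    url=os.environ["TIMBR_URL"],      # Timbr server URL
    token=os.environ["TIMBR_TOKEN"],  # Timbr API token
    ontology="my_ontology",           # placeholder knowledge-graph name
)

result = chain.invoke({"prompt": "What were the top five customers by revenue last quarter?"})
print(result)
```

Consult the Timbr LangChain documentation linked above for the actual signatures and supported providers.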
langchain_timbr-1.5.0/pyproject.toml
@@ -0,0 +1,76 @@
+ [build-system]
+ requires = ["hatchling>=1.25"]
+ build-backend = "hatchling.build"
+
+ [project]
+ name = "langchain-timbr"
+ version = "1.5.0"
+ description = "LangChain & LangGraph extensions that parse LLM prompts into Timbr semantic SQL and execute them."
+ readme = "README.md"
+ requires-python = ">=3.9,<3.13"
+ license = {text = "MIT"}
+ authors = [{ name = "Bar Cohen", email = "barco@timbr.ai" }]
+ keywords = ["LLM", "LangChain", "LangGraph", "Timbr", "Semantic Layer", "Knowledge Graph", "SQL", "Agents"]
+ classifiers = [
+   "License :: OSI Approved :: MIT License",
+   "Programming Language :: Python :: 3",
+   "Programming Language :: Python :: 3 :: Only",
+   "Programming Language :: Python :: 3.9",
+   "Programming Language :: Python :: 3.10",
+   "Programming Language :: Python :: 3.11",
+   "Programming Language :: Python :: 3.12",
+   "Intended Audience :: Developers",
+   "Topic :: Scientific/Engineering :: Artificial Intelligence",
+ ]
+
+ dependencies = [
+   "cryptography>=44.0.3",
+   "langchain>=0.3.25",
+   "langchain_community>=0.3.20",
+   "langchain-core>=0.3.58",
+   "langgraph>=0.3.20",
+   "pydantic==2.10.4",
+   "pytimbr-api>=2.0.0",
+   "tiktoken==0.8.0",
+   "transformers>=4.51.3"
+ ]
+
+ [project.optional-dependencies]
+ # LLM providers
+ openai = ["openai==1.77.0", "langchain-openai>=0.3.16"]
+ anthropic = ["anthropic==0.42.0", "langchain-anthropic>=0.3.1"]
+ google = ["langchain-google-genai>=2.0.9", "google-generativeai==0.8.4"]
+ snowflake = ["snowflake>=0.8.0", "snowflake-snowpark-python>=1.6.0"]
+
+ # Development and testing
+ dev = [
+   "pytest==8.3.4",
+   "langchain-tests>=0.3.20",
+   "pyarrow<19.0.0",
+   "uvicorn==0.34.0"
+ ]
+
+ # All optional dependencies
+ all = [
+   "anthropic==0.42.0",
+   "google-generativeai==0.8.4",
+   "langchain-anthropic>=0.3.1",
+   "openai==1.77.0",
+   "langchain-openai>=0.3.16",
+   "langchain-google-genai>=2.0.9",
+   "snowflake>=0.8.0",
+   "snowflake-snowpark-python>=1.6.0",
+   "pytest==8.3.4",
+   "langchain-tests>=0.3.20",
+   "pyarrow<19.0.0",
+   "uvicorn==0.34.0"
+ ]
+
+ [project.urls]
+ Homepage = "https://github.com/WPSemantix/langchain-timbr"
+ Documentation = "https://docs.timbr.ai/doc/docs/integration/langchain-sdk/"
+ Source = "https://github.com/WPSemantix/langchain-timbr"
+ Issues = "https://github.com/WPSemantix/langchain-timbr/issues"
+
+ [tool.hatch.build.targets.wheel]
+ packages = ["src/langchain_timbr"]
langchain_timbr-1.5.0/pytest.ini
@@ -0,0 +1,15 @@
+ [pytest]
+ asyncio_default_fixture_loop_scope = function
+ testpaths = tests
+ pythonpath = src
+
+ addopts =
+     -v
+     --tb=short
+     --strict-markers
+
+ # Mark slow tests (if you want to add markers later)
+ markers =
+     slow: marks tests as slow (deselect with '-m "not slow"')
+     integration: marks tests as integration tests
+     unit: marks tests as unit tests
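The markers registered above can be used to slice the suite; a small illustration follows (the helper script is hypothetical, and only the marker names come from this `pytest.ini`):

```python
# run_fast_tests.py -- hypothetical helper that deselects slow and integration tests,
# equivalent to running: pytest -m "not slow and not integration"
import sys

import pytest

if __name__ == "__main__":
    sys.exit(pytest.main(["-m", "not slow and not integration"]))
```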
langchain_timbr-1.5.0/requirements.txt
@@ -0,0 +1,21 @@
+ anthropic==0.42.0
+ cryptography>=44.0.3
+ google-generativeai==0.8.4
+ langchain>=0.3.25
+ langchain-anthropic>=0.3.1
+ langchain_community>=0.3.20
+ langchain-core>=0.3.58
+ langchain-google-genai>=2.0.9
+ langchain-openai>=0.3.16
+ langchain-tests>=0.3.20
+ langgraph>=0.3.20
+ openai==1.77.0
+ pyarrow<19.0.0
+ pydantic==2.10.4
+ pytest==8.3.4
+ pytimbr-api>=2.0.0
+ snowflake>=0.8.0
+ snowflake-snowpark-python>=1.6.0
+ tiktoken==0.8.0
+ transformers>=4.51.3
+ uvicorn==0.34.0
langchain_timbr-1.5.0/src/langchain_timbr/__init__.py
@@ -0,0 +1,17 @@
+ #
+ # *### ., @%
+ # *%## `#// %%%* *@ `` @%
+ # #*. * .%%%` @@@@* @@ @@@@,@@@@ @&@@@@ .&@@@*
+ # #%%# .. *@ @@ @` @@` ,@ @% #@ @@
+ # ,, .,%(##/./%%#, *@ @@ @` @@` ,@ @% #@ @@
+ # ,%##% `` `/@@* @@ @` @@` ,@ (/@@@#/ @@
+ # ``
+ # ``````````````````````````````````````````````````````````````
+ # Copyright (C) 2018-2025 timbr.ai
+
+ __version__ = "1.4.3"
+ from .timbr_llm_connector import TimbrLlmConnector
+ from .llm_wrapper.llm_wrapper import LlmWrapper, LlmTypes
+
+ from .langchain import *
+ from .langgraph import *
langchain_timbr-1.5.0/src/langchain_timbr/config.py
@@ -0,0 +1,21 @@
+ import os
+ from .utils.general import to_boolean, to_integer, parse_list
+
+ # MUST HAVE VARIABLES
+ url = os.environ.get('TIMBR_URL')
+ token = os.environ.get('TIMBR_TOKEN')
+ ontology = os.environ.get('ONTOLOGY', 'system_db')
+
+ # OPTIONAL VARIABLES
+ is_jwt = to_boolean(os.environ.get('IS_JWT', 'false'))
+ jwt_tenant_id = os.environ.get('JWT_TENANT_ID', None)
+
+ cache_timeout = to_integer(os.environ.get('CACHE_TIMEOUT', 120))
+ ignore_tags = parse_list(os.environ.get('IGNORE_TAGS', 'icon'))
+ ignore_tags_prefix = parse_list(os.environ.get('IGNORE_TAGS_PREFIX', 'mdx.,bli.'))
+
+ llm_type = os.environ.get('LLM_TYPE')
+ llm_model = os.environ.get('LLM_MODEL')
+ llm_api_key = os.environ.get('LLM_API_KEY')
+ llm_temperature = os.environ.get('LLM_TEMPERATURE', 0.0)
+ llm_timeout = to_integer(os.environ.get('LLM_TIMEOUT', 60)) # Default 60 seconds timeout
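Because `config.py` reads `os.environ` when it is imported, one way to supply the required settings is to export them before importing the package. A minimal sketch with placeholder values (only the variable names come from the module above; the values, and the assumption that setting them in-process before import is sufficient, are illustrative):

```python
# Hypothetical configuration sketch: set the variables read by config.py first.
import os

os.environ.setdefault("TIMBR_URL", "https://your-timbr-server.example.com")  # placeholder URL
os.environ.setdefault("TIMBR_TOKEN", "tk_xxx")                               # placeholder token
os.environ.setdefault("ONTOLOGY", "my_ontology")                             # defaults to 'system_db' if unset
os.environ.setdefault("LLM_TYPE", "openai-chat")                             # placeholder; see LlmTypes
os.environ.setdefault("LLM_MODEL", "gpt-4o")                                 # placeholder model name
os.environ.setdefault("LLM_API_KEY", "sk-xxx")                               # placeholder key
os.environ.setdefault("LLM_TIMEOUT", "60")                                   # seconds, parsed by to_integer

import langchain_timbr  # noqa: E402  (modules that use config.py pick these up on import)
```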
langchain_timbr-1.5.0/src/langchain_timbr/langchain/__init__.py
@@ -0,0 +1,16 @@
+ from .identify_concept_chain import IdentifyTimbrConceptChain
+ from .generate_timbr_sql_chain import GenerateTimbrSqlChain
+ from .validate_timbr_sql_chain import ValidateTimbrSqlChain
+ from .execute_timbr_query_chain import ExecuteTimbrQueryChain
+ from .generate_answer_chain import GenerateAnswerChain
+ from .timbr_sql_agent import TimbrSqlAgent, create_timbr_sql_agent
+
+ __all__ = [
+     "IdentifyTimbrConceptChain",
+     "GenerateTimbrSqlChain",
+     "ValidateTimbrSqlChain",
+     "ExecuteTimbrQueryChain",
+     "GenerateAnswerChain",
+     "TimbrSqlAgent",
+     "create_timbr_sql_agent",
+ ]