langchain-zunno 0.1.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,21 @@
+ MIT License
+
+ Copyright (c) 2024 Zunno
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in all
+ copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ SOFTWARE.
@@ -0,0 +1,200 @@
+ Metadata-Version: 2.4
+ Name: langchain-zunno
+ Version: 0.1.0
+ Summary: LangChain integration for Zunno LLM and Embeddings
+ Home-page: https://github.com/zunno/langchain-zunno
+ Author: Amit Kumar
+ Author-email: Amit Kumar <amit@zunno.ai>
+ Maintainer-email: Amit Kumar <amit@zunno.ai>
+ License: MIT
+ Project-URL: Homepage, https://github.com/zunno/langchain-zunno
+ Project-URL: Documentation, https://github.com/zunno/langchain-zunno#readme
+ Project-URL: Repository, https://github.com/zunno/langchain-zunno
+ Project-URL: Bug Tracker, https://github.com/zunno/langchain-zunno/issues
+ Keywords: langchain,llm,embeddings,zunno,ai,machine-learning
+ Classifier: Development Status :: 3 - Alpha
+ Classifier: Intended Audience :: Developers
+ Classifier: License :: OSI Approved :: MIT License
+ Classifier: Operating System :: OS Independent
+ Classifier: Programming Language :: Python :: 3
+ Classifier: Programming Language :: Python :: 3.8
+ Classifier: Programming Language :: Python :: 3.9
+ Classifier: Programming Language :: Python :: 3.10
+ Classifier: Programming Language :: Python :: 3.11
+ Classifier: Programming Language :: Python :: 3.12
+ Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
+ Classifier: Topic :: Software Development :: Libraries :: Python Modules
+ Requires-Python: >=3.8
+ Description-Content-Type: text/markdown
+ License-File: LICENSE
+ Requires-Dist: langchain>=0.1.0
+ Requires-Dist: langchain-core>=0.1.0
+ Requires-Dist: requests>=2.25.0
+ Requires-Dist: httpx>=0.24.0
+ Requires-Dist: pydantic>=2.0.0
+ Provides-Extra: dev
+ Requires-Dist: pytest>=7.0.0; extra == "dev"
+ Requires-Dist: pytest-asyncio>=0.21.0; extra == "dev"
+ Requires-Dist: black>=22.0.0; extra == "dev"
+ Requires-Dist: isort>=5.0.0; extra == "dev"
+ Requires-Dist: flake8>=4.0.0; extra == "dev"
+ Dynamic: author
+ Dynamic: home-page
+ Dynamic: license-file
+ Dynamic: requires-python
+
+ # LangChain Zunno Integration
+
+ A LangChain integration for Zunno LLM and Embeddings, providing easy-to-use wrappers for text generation and embeddings.
+
+ ## Installation
+
+ ```bash
+ pip install langchain-zunno
+ ```
+
+ ## Quick Start
+
+ ### Text Generation (LLM)
+
+ ```python
+ from langchain_zunno import ZunnoLLM
+
+ # Create an LLM instance
+ llm = ZunnoLLM(model_name="mistral:latest")
+
+ # Generate text
+ response = llm.invoke("Hello, how are you?")
+ print(response)
+ ```
+
+ ### Embeddings
+
+ ```python
+ from langchain_zunno import ZunnoLLMEmbeddings
+
+ # Create an embeddings instance
+ embeddings = ZunnoLLMEmbeddings(model_name="mistral:latest")
+
+ # Get embeddings for a single text
+ embedding = embeddings.embed_query("Hello, how are you?")
+ print(f"Embedding dimension: {len(embedding)}")
+
+ # Get embeddings for multiple texts
+ texts = ["Hello world", "How are you?", "Good morning"]
+ embeddings_list = embeddings.embed_documents(texts)
+ print(f"Number of embeddings: {len(embeddings_list)}")
+ ```
+
+ ### Async Usage
+
+ ```python
+ import asyncio
+ from langchain_zunno import ZunnoLLM, ZunnoLLMEmbeddings
+
+ async def main():
+     # Async LLM
+     llm = ZunnoLLM(model_name="mistral:latest")
+     response = await llm.ainvoke("Hello, how are you?")
+     print(response)
+
+     # Async embeddings
+     embeddings = ZunnoLLMEmbeddings(model_name="mistral:latest")
+     embedding = await embeddings.aembed_query("Hello, how are you?")
+     print(f"Embedding dimension: {len(embedding)}")
+
+ asyncio.run(main())
+ ```
+
+ ## Factory Functions
+
+ For convenience, you can use factory functions to create instances:
+
+ ```python
+ from langchain_zunno import create_zunno_llm, create_zunno_embeddings
+
+ # Create LLM
+ llm = create_zunno_llm(
+     model_name="mistral:latest",
+     temperature=0.7,
+     max_tokens=100
+ )
+
+ # Create embeddings
+ embeddings = create_zunno_embeddings(
+     model_name="mistral:latest"
+ )
+ ```
+
+ ## Configuration
+
+ ### LLM Configuration
+
+ - `model_name`: The name of the model to use
+ - `base_url`: API endpoint (default: "http://15.206.124.44/v1/prompt-response")
+ - `temperature`: Controls randomness in generation (default: 0.7)
+ - `max_tokens`: Maximum number of tokens to generate (optional)
+ - `timeout`: Request timeout in seconds (default: 300)
+
+ ### Embeddings Configuration
+
+ - `model_name`: The name of the embedding model to use
+ - `base_url`: API endpoint (default: "http://15.206.124.44/v1/text-embeddings")
+ - `timeout`: Request timeout in seconds (default: 300)
+
+ ## API Endpoints
+
+ The package connects to the following Zunno API endpoints:
+
+ - **Text Generation**: `http://15.206.124.44/v1/prompt-response`
+ - **Embeddings**: `http://15.206.124.44/v1/text-embeddings`
+
+ ## Error Handling
+
+ The package includes comprehensive error handling:
+
+ ```python
+ try:
+     response = llm.invoke("Hello")
+ except Exception as e:
+     print(f"Error: {e}")
+ ```
+
+ ## Development
+
+ ### Installation for Development
+
+ ```bash
+ git clone https://github.com/zunno/langchain-zunno.git
+ cd langchain-zunno
+ pip install -e ".[dev]"
+ ```
+
+ ### Running Tests
+
+ ```bash
+ pytest
+ ```
+
+ ### Code Formatting
+
+ ```bash
+ black .
+ isort .
+ ```
+
+ ## License
+
+ This project is licensed under the MIT License - see the [LICENSE](LICENSE) file for details.
+
+ ## Contributing
+
+ 1. Fork the repository
+ 2. Create a feature branch
+ 3. Make your changes
+ 4. Add tests
+ 5. Submit a pull request
+
+ ## Support
+
+ For support, please open an issue on GitHub or contact us at support@zunno.ai.
@@ -0,0 +1,155 @@
+ # LangChain Zunno Integration
+
+ A LangChain integration for Zunno LLM and Embeddings, providing easy-to-use wrappers for text generation and embeddings.
+
+ ## Installation
+
+ ```bash
+ pip install langchain-zunno
+ ```
+
+ ## Quick Start
+
+ ### Text Generation (LLM)
+
+ ```python
+ from langchain_zunno import ZunnoLLM
+
+ # Create an LLM instance
+ llm = ZunnoLLM(model_name="mistral:latest")
+
+ # Generate text
+ response = llm.invoke("Hello, how are you?")
+ print(response)
+ ```
+
+ ### Embeddings
+
+ ```python
+ from langchain_zunno import ZunnoLLMEmbeddings
+
+ # Create an embeddings instance
+ embeddings = ZunnoLLMEmbeddings(model_name="mistral:latest")
+
+ # Get embeddings for a single text
+ embedding = embeddings.embed_query("Hello, how are you?")
+ print(f"Embedding dimension: {len(embedding)}")
+
+ # Get embeddings for multiple texts
+ texts = ["Hello world", "How are you?", "Good morning"]
+ embeddings_list = embeddings.embed_documents(texts)
+ print(f"Number of embeddings: {len(embeddings_list)}")
+ ```
+
+ ### Async Usage
+
+ ```python
+ import asyncio
+ from langchain_zunno import ZunnoLLM, ZunnoLLMEmbeddings
+
+ async def main():
+     # Async LLM
+     llm = ZunnoLLM(model_name="mistral:latest")
+     response = await llm.ainvoke("Hello, how are you?")
+     print(response)
+
+     # Async embeddings
+     embeddings = ZunnoLLMEmbeddings(model_name="mistral:latest")
+     embedding = await embeddings.aembed_query("Hello, how are you?")
+     print(f"Embedding dimension: {len(embedding)}")
+
+ asyncio.run(main())
+ ```
+
+ ## Factory Functions
+
+ For convenience, you can use factory functions to create instances:
+
+ ```python
+ from langchain_zunno import create_zunno_llm, create_zunno_embeddings
+
+ # Create LLM
+ llm = create_zunno_llm(
+     model_name="mistral:latest",
+     temperature=0.7,
+     max_tokens=100
+ )
+
+ # Create embeddings
+ embeddings = create_zunno_embeddings(
+     model_name="mistral:latest"
+ )
+ ```
+
+ ## Configuration
+
+ ### LLM Configuration
+
+ - `model_name`: The name of the model to use
+ - `base_url`: API endpoint (default: "http://15.206.124.44/v1/prompt-response")
+ - `temperature`: Controls randomness in generation (default: 0.7)
+ - `max_tokens`: Maximum number of tokens to generate (optional)
+ - `timeout`: Request timeout in seconds (default: 300)
+
+ ### Embeddings Configuration
+
+ - `model_name`: The name of the embedding model to use
+ - `base_url`: API endpoint (default: "http://15.206.124.44/v1/text-embeddings")
+ - `timeout`: Request timeout in seconds (default: 300)
+
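+ Putting these options together, the snippet below shows fully configured instances. It is a minimal sketch: the parameter names come from the lists above, and the values shown are simply the documented defaults plus an illustrative `max_tokens`.
+
+ ```python
+ from langchain_zunno import ZunnoLLM, ZunnoLLMEmbeddings
+
+ # Explicitly configured LLM (values mirror the documented defaults)
+ llm = ZunnoLLM(
+     model_name="mistral:latest",
+     base_url="http://15.206.124.44/v1/prompt-response",
+     temperature=0.7,
+     max_tokens=256,
+     timeout=300,
+ )
+
+ # Explicitly configured embeddings client
+ embeddings = ZunnoLLMEmbeddings(
+     model_name="mistral:latest",
+     base_url="http://15.206.124.44/v1/text-embeddings",
+     timeout=300,
+ )
+ ```
+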
+ ## API Endpoints
+
+ The package connects to the following Zunno API endpoints:
+
+ - **Text Generation**: `http://15.206.124.44/v1/prompt-response`
+ - **Embeddings**: `http://15.206.124.44/v1/text-embeddings`
+
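+ If you need to reach the service without the LangChain wrappers, the endpoints can be called directly over HTTP. The sketch below uses `requests` (already a dependency of this package); note that the JSON field names in the payload are assumptions made for illustration and may not match the live API schema.
+
+ ```python
+ import requests
+
+ # Hypothetical request body -- field names are assumed, not taken from the API docs.
+ payload = {"model_name": "mistral:latest", "prompt": "Hello, how are you?"}
+
+ resp = requests.post(
+     "http://15.206.124.44/v1/prompt-response",
+     json=payload,
+     timeout=300,
+ )
+ resp.raise_for_status()
+ print(resp.json())
+ ```
+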
+ ## Error Handling
+
+ The package includes comprehensive error handling:
+
+ ```python
+ try:
+     response = llm.invoke("Hello")
+ except Exception as e:
+     print(f"Error: {e}")
+ ```
+
+ ## Development
+
+ ### Installation for Development
+
+ ```bash
+ git clone https://github.com/zunno/langchain-zunno.git
+ cd langchain-zunno
+ pip install -e ".[dev]"
+ ```
+
+ ### Running Tests
+
+ ```bash
+ pytest
+ ```
+
+ ### Code Formatting
+
+ ```bash
+ black .
+ isort .
+ ```
+
+ ## License
+
+ This project is licensed under the MIT License - see the [LICENSE](LICENSE) file for details.
+
+ ## Contributing
+
+ 1. Fork the repository
+ 2. Create a feature branch
+ 3. Make your changes
+ 4. Add tests
+ 5. Submit a pull request
+
+ ## Support
+
+ For support, please open an issue on GitHub or contact us at support@zunno.ai.
@@ -0,0 +1,200 @@
+ Metadata-Version: 2.4
+ Name: langchain-zunno
+ Version: 0.1.0
+ Summary: LangChain integration for Zunno LLM and Embeddings
+ Home-page: https://github.com/zunno/langchain-zunno
+ Author: Amit Kumar
+ Author-email: Amit Kumar <amit@zunno.ai>
+ Maintainer-email: Amit Kumar <amit@zunno.ai>
+ License: MIT
+ Project-URL: Homepage, https://github.com/zunno/langchain-zunno
+ Project-URL: Documentation, https://github.com/zunno/langchain-zunno#readme
+ Project-URL: Repository, https://github.com/zunno/langchain-zunno
+ Project-URL: Bug Tracker, https://github.com/zunno/langchain-zunno/issues
+ Keywords: langchain,llm,embeddings,zunno,ai,machine-learning
+ Classifier: Development Status :: 3 - Alpha
+ Classifier: Intended Audience :: Developers
+ Classifier: License :: OSI Approved :: MIT License
+ Classifier: Operating System :: OS Independent
+ Classifier: Programming Language :: Python :: 3
+ Classifier: Programming Language :: Python :: 3.8
+ Classifier: Programming Language :: Python :: 3.9
+ Classifier: Programming Language :: Python :: 3.10
+ Classifier: Programming Language :: Python :: 3.11
+ Classifier: Programming Language :: Python :: 3.12
+ Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
+ Classifier: Topic :: Software Development :: Libraries :: Python Modules
+ Requires-Python: >=3.8
+ Description-Content-Type: text/markdown
+ License-File: LICENSE
+ Requires-Dist: langchain>=0.1.0
+ Requires-Dist: langchain-core>=0.1.0
+ Requires-Dist: requests>=2.25.0
+ Requires-Dist: httpx>=0.24.0
+ Requires-Dist: pydantic>=2.0.0
+ Provides-Extra: dev
+ Requires-Dist: pytest>=7.0.0; extra == "dev"
+ Requires-Dist: pytest-asyncio>=0.21.0; extra == "dev"
+ Requires-Dist: black>=22.0.0; extra == "dev"
+ Requires-Dist: isort>=5.0.0; extra == "dev"
+ Requires-Dist: flake8>=4.0.0; extra == "dev"
+ Dynamic: author
+ Dynamic: home-page
+ Dynamic: license-file
+ Dynamic: requires-python
+
+ # LangChain Zunno Integration
+
+ A LangChain integration for Zunno LLM and Embeddings, providing easy-to-use wrappers for text generation and embeddings.
+
+ ## Installation
+
+ ```bash
+ pip install langchain-zunno
+ ```
+
+ ## Quick Start
+
+ ### Text Generation (LLM)
+
+ ```python
+ from langchain_zunno import ZunnoLLM
+
+ # Create an LLM instance
+ llm = ZunnoLLM(model_name="mistral:latest")
+
+ # Generate text
+ response = llm.invoke("Hello, how are you?")
+ print(response)
+ ```
+
+ ### Embeddings
+
+ ```python
+ from langchain_zunno import ZunnoLLMEmbeddings
+
+ # Create an embeddings instance
+ embeddings = ZunnoLLMEmbeddings(model_name="mistral:latest")
+
+ # Get embeddings for a single text
+ embedding = embeddings.embed_query("Hello, how are you?")
+ print(f"Embedding dimension: {len(embedding)}")
+
+ # Get embeddings for multiple texts
+ texts = ["Hello world", "How are you?", "Good morning"]
+ embeddings_list = embeddings.embed_documents(texts)
+ print(f"Number of embeddings: {len(embeddings_list)}")
+ ```
+
+ ### Async Usage
+
+ ```python
+ import asyncio
+ from langchain_zunno import ZunnoLLM, ZunnoLLMEmbeddings
+
+ async def main():
+     # Async LLM
+     llm = ZunnoLLM(model_name="mistral:latest")
+     response = await llm.ainvoke("Hello, how are you?")
+     print(response)
+
+     # Async embeddings
+     embeddings = ZunnoLLMEmbeddings(model_name="mistral:latest")
+     embedding = await embeddings.aembed_query("Hello, how are you?")
+     print(f"Embedding dimension: {len(embedding)}")
+
+ asyncio.run(main())
+ ```
+
+ ## Factory Functions
+
+ For convenience, you can use factory functions to create instances:
+
+ ```python
+ from langchain_zunno import create_zunno_llm, create_zunno_embeddings
+
+ # Create LLM
+ llm = create_zunno_llm(
+     model_name="mistral:latest",
+     temperature=0.7,
+     max_tokens=100
+ )
+
+ # Create embeddings
+ embeddings = create_zunno_embeddings(
+     model_name="mistral:latest"
+ )
+ ```
+
+ ## Configuration
+
+ ### LLM Configuration
+
+ - `model_name`: The name of the model to use
+ - `base_url`: API endpoint (default: "http://15.206.124.44/v1/prompt-response")
+ - `temperature`: Controls randomness in generation (default: 0.7)
+ - `max_tokens`: Maximum number of tokens to generate (optional)
+ - `timeout`: Request timeout in seconds (default: 300)
+
+ ### Embeddings Configuration
+
+ - `model_name`: The name of the embedding model to use
+ - `base_url`: API endpoint (default: "http://15.206.124.44/v1/text-embeddings")
+ - `timeout`: Request timeout in seconds (default: 300)
+
+ ## API Endpoints
+
+ The package connects to the following Zunno API endpoints:
+
+ - **Text Generation**: `http://15.206.124.44/v1/prompt-response`
+ - **Embeddings**: `http://15.206.124.44/v1/text-embeddings`
+
+ ## Error Handling
+
+ The package includes comprehensive error handling:
+
+ ```python
+ try:
+     response = llm.invoke("Hello")
+ except Exception as e:
+     print(f"Error: {e}")
+ ```
+
+ ## Development
+
+ ### Installation for Development
+
+ ```bash
+ git clone https://github.com/zunno/langchain-zunno.git
+ cd langchain-zunno
+ pip install -e ".[dev]"
+ ```
+
+ ### Running Tests
+
+ ```bash
+ pytest
+ ```
+
+ ### Code Formatting
+
+ ```bash
+ black .
+ isort .
+ ```
+
+ ## License
+
+ This project is licensed under the MIT License - see the [LICENSE](LICENSE) file for details.
+
+ ## Contributing
+
+ 1. Fork the repository
+ 2. Create a feature branch
+ 3. Make your changes
+ 4. Add tests
+ 5. Submit a pull request
+
+ ## Support
+
+ For support, please open an issue on GitHub or contact us at support@zunno.ai.
@@ -0,0 +1,9 @@
+ LICENSE
+ README.md
+ pyproject.toml
+ setup.py
+ langchain_zunno.egg-info/PKG-INFO
+ langchain_zunno.egg-info/SOURCES.txt
+ langchain_zunno.egg-info/dependency_links.txt
+ langchain_zunno.egg-info/requires.txt
+ langchain_zunno.egg-info/top_level.txt
@@ -0,0 +1,12 @@
+ langchain>=0.1.0
+ langchain-core>=0.1.0
+ requests>=2.25.0
+ httpx>=0.24.0
+ pydantic>=2.0.0
+
+ [dev]
+ pytest>=7.0.0
+ pytest-asyncio>=0.21.0
+ black>=22.0.0
+ isort>=5.0.0
+ flake8>=4.0.0
@@ -0,0 +1,89 @@
+ [build-system]
+ requires = ["setuptools>=61.0", "wheel"]
+ build-backend = "setuptools.build_meta"
+
+ [project]
+ name = "langchain-zunno"
+ version = "0.1.0"
+ description = "LangChain integration for Zunno LLM and Embeddings"
+ readme = "README.md"
+ license = {text = "MIT"}
+ authors = [
+     {name = "Amit Kumar", email = "amit@zunno.ai"}
+ ]
+ maintainers = [
+     {name = "Amit Kumar", email = "amit@zunno.ai"}
+ ]
+ keywords = ["langchain", "llm", "embeddings", "zunno", "ai", "machine-learning"]
+ classifiers = [
+     "Development Status :: 3 - Alpha",
+     "Intended Audience :: Developers",
+     "License :: OSI Approved :: MIT License",
+     "Operating System :: OS Independent",
+     "Programming Language :: Python :: 3",
+     "Programming Language :: Python :: 3.8",
+     "Programming Language :: Python :: 3.9",
+     "Programming Language :: Python :: 3.10",
+     "Programming Language :: Python :: 3.11",
+     "Programming Language :: Python :: 3.12",
+     "Topic :: Scientific/Engineering :: Artificial Intelligence",
+     "Topic :: Software Development :: Libraries :: Python Modules",
+ ]
+ requires-python = ">=3.8"
+ dependencies = [
+     "langchain>=0.1.0",
+     "langchain-core>=0.1.0",
+     "requests>=2.25.0",
+     "httpx>=0.24.0",
+     "pydantic>=2.0.0",
+ ]
+
+ [project.optional-dependencies]
+ dev = [
+     "pytest>=7.0.0",
+     "pytest-asyncio>=0.21.0",
+     "black>=22.0.0",
+     "isort>=5.0.0",
+     "flake8>=4.0.0",
+ ]
+
+ [project.urls]
+ Homepage = "https://github.com/zunno/langchain-zunno"
+ Documentation = "https://github.com/zunno/langchain-zunno#readme"
+ Repository = "https://github.com/zunno/langchain-zunno"
+ "Bug Tracker" = "https://github.com/zunno/langchain-zunno/issues"
+
+ [tool.setuptools.packages.find]
+ where = ["."]
+ include = ["langchain_zunno*"]
+
+ [tool.black]
+ line-length = 88
+ target-version = ['py38']
+ include = '\.pyi?$'
+ extend-exclude = '''
+ /(
+   # directories
+   \.eggs
+   | \.git
+   | \.hg
+   | \.mypy_cache
+   | \.tox
+   | \.venv
+   | build
+   | dist
+ )/
+ '''
+
+ [tool.isort]
+ profile = "black"
+ multi_line_output = 3
+ line_length = 88
+ known_first_party = ["langchain_zunno"]
+
+ [tool.pytest.ini_options]
+ testpaths = ["tests"]
+ python_files = ["test_*.py"]
+ python_classes = ["Test*"]
+ python_functions = ["test_*"]
+ addopts = "-v --tb=short"
@@ -0,0 +1,4 @@
+ [egg_info]
+ tag_build =
+ tag_date = 0
+
@@ -0,0 +1,53 @@
+ from setuptools import setup, find_packages
+
+ with open("README.md", "r", encoding="utf-8") as fh:
+     long_description = fh.read()
+
+ setup(
+     name="langchain-zunno",
+     version="0.1.0",
+     author="Amit Kumar",
+     author_email="amit@zunno.ai",
+     description="LangChain integration for Zunno LLM and Embeddings",
+     long_description=long_description,
+     long_description_content_type="text/markdown",
+     url="https://github.com/zunno/langchain-zunno",
+     packages=find_packages(),
+     classifiers=[
+         "Development Status :: 3 - Alpha",
+         "Intended Audience :: Developers",
+         "License :: OSI Approved :: MIT License",
+         "Operating System :: OS Independent",
+         "Programming Language :: Python :: 3",
+         "Programming Language :: Python :: 3.8",
+         "Programming Language :: Python :: 3.9",
+         "Programming Language :: Python :: 3.10",
+         "Programming Language :: Python :: 3.11",
+         "Programming Language :: Python :: 3.12",
+         "Topic :: Scientific/Engineering :: Artificial Intelligence",
+         "Topic :: Software Development :: Libraries :: Python Modules",
+     ],
+     python_requires=">=3.8",
+     install_requires=[
+         "langchain>=0.1.0",
+         "langchain-core>=0.1.0",
+         "requests>=2.25.0",
+         "httpx>=0.24.0",
+         "pydantic>=2.0.0",
+     ],
+     extras_require={
+         "dev": [
+             "pytest>=7.0.0",
+             "pytest-asyncio>=0.21.0",
+             "black>=22.0.0",
+             "isort>=5.0.0",
+             "flake8>=4.0.0",
+         ],
+     },
+     keywords="langchain, llm, embeddings, zunno, ai, machine-learning",
+     project_urls={
+         "Bug Reports": "https://github.com/zunno/langchain-zunno/issues",
+         "Source": "https://github.com/zunno/langchain-zunno",
+         "Documentation": "https://github.com/zunno/langchain-zunno#readme",
+     },
+ )