langchain-kinetica 1.1.0__tar.gz → 1.2.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {langchain_kinetica-1.1.0 → langchain_kinetica-1.2.0}/PKG-INFO +4 -6
- {langchain_kinetica-1.1.0 → langchain_kinetica-1.2.0}/README.md +3 -5
- langchain_kinetica-1.2.0/notebooks/README.md +37 -0
- {langchain_kinetica-1.1.0 → langchain_kinetica-1.2.0}/notebooks/kinetica_chat.ipynb +56 -70
- {langchain_kinetica-1.1.0 → langchain_kinetica-1.2.0}/notebooks/kinetica_loader.ipynb +15 -8
- {langchain_kinetica-1.1.0 → langchain_kinetica-1.2.0}/notebooks/kinetica_provider.ipynb +12 -7
- {langchain_kinetica-1.1.0 → langchain_kinetica-1.2.0}/notebooks/kinetica_retriever.ipynb +24 -24
- {langchain_kinetica-1.1.0 → langchain_kinetica-1.2.0}/notebooks/kinetica_vectorstore.ipynb +45 -46
- {langchain_kinetica-1.1.0 → langchain_kinetica-1.2.0}/pyproject.toml +2 -1
- {langchain_kinetica-1.1.0 → langchain_kinetica-1.2.0}/tests/integration_tests/test_chat_models.py +29 -31
- {langchain_kinetica-1.1.0 → langchain_kinetica-1.2.0}/uv.lock +40 -1
- {langchain_kinetica-1.1.0 → langchain_kinetica-1.2.0}/.editorconfig +0 -0
- {langchain_kinetica-1.1.0 → langchain_kinetica-1.2.0}/.github/workflows/release.yml +0 -0
- {langchain_kinetica-1.1.0 → langchain_kinetica-1.2.0}/.github/workflows/test.yml +0 -0
- {langchain_kinetica-1.1.0 → langchain_kinetica-1.2.0}/.gitignore +0 -0
- {langchain_kinetica-1.1.0 → langchain_kinetica-1.2.0}/.python-version +0 -0
- {langchain_kinetica-1.1.0 → langchain_kinetica-1.2.0}/Makefile +0 -0
- {langchain_kinetica-1.1.0 → langchain_kinetica-1.2.0}/langchain_kinetica/__init__.py +0 -0
- {langchain_kinetica-1.1.0 → langchain_kinetica-1.2.0}/langchain_kinetica/chat_models.py +0 -0
- {langchain_kinetica-1.1.0 → langchain_kinetica-1.2.0}/langchain_kinetica/document_loaders.py +0 -0
- {langchain_kinetica-1.1.0 → langchain_kinetica-1.2.0}/langchain_kinetica/py.typed +0 -0
- {langchain_kinetica-1.1.0 → langchain_kinetica-1.2.0}/langchain_kinetica/vectorstores.py +0 -0
- {langchain_kinetica-1.1.0 → langchain_kinetica-1.2.0}/notebooks/state_of_the_union.txt +0 -0
- {langchain_kinetica-1.1.0 → langchain_kinetica-1.2.0}/scripts/check_imports.py +0 -0
- {langchain_kinetica-1.1.0 → langchain_kinetica-1.2.0}/scripts/lint_imports.sh +0 -0
- {langchain_kinetica-1.1.0 → langchain_kinetica-1.2.0}/tests/integration_tests/__init__.py +0 -0
- {langchain_kinetica-1.1.0 → langchain_kinetica-1.2.0}/tests/integration_tests/test_vectorstores.py +0 -0
- {langchain_kinetica-1.1.0 → langchain_kinetica-1.2.0}/tests/unit_tests/__init__.py +0 -0
- {langchain_kinetica-1.1.0 → langchain_kinetica-1.2.0}/tests/unit_tests/test_unit_chat_models.py +0 -0
{langchain_kinetica-1.1.0 → langchain_kinetica-1.2.0}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: langchain-kinetica
-Version: 1.1.0
+Version: 1.2.0
 Summary: An integration package connecting Kinetica and LangChain
 Project-URL: Homepage, https://kinetica.com
 Project-URL: Documentation, https://docs.kinetica.com/7.1/sql-gpt/

@@ -38,19 +38,17 @@ This package provides integration for core capabilities:
 - **Chat model** - Kinetica native Text-to-SQL Generation.
 - **Vector Store** - Vector similarity search using Kinetica tables.
 - **Document Loader** - Generate embeddings from Kinetica tables.
-
-For more information see the
-[Kinteica Provider Docs](https://docs.langchain.com/oss/python/integrations/providers/kinetica)
+- **Retriever** - Document retriever based on the Vector Store.
 
 ## Quick Install
 
 ```bash
-pip install langchain-
+pip install langchain-kinetica
 ```
 
 ## Documentation
 
-For conceptual guides, tutorials, and examples on using these classes, see the [
+For conceptual guides, tutorials, and examples on using these classes, see the [Kinetica Provider Docs](https://docs.langchain.com/oss/python/integrations/providers/kinetica).
 
 The documentation is also available in notebook format under `./notebooks`.
 
{langchain_kinetica-1.1.0 → langchain_kinetica-1.2.0}/README.md

@@ -22,19 +22,17 @@ This package provides integration for core capabilities:
 - **Chat model** - Kinetica native Text-to-SQL Generation.
 - **Vector Store** - Vector similarity search using Kinetica tables.
 - **Document Loader** - Generate embeddings from Kinetica tables.
-
-For more information see the
-[Kinteica Provider Docs](https://docs.langchain.com/oss/python/integrations/providers/kinetica)
+- **Retriever** - Document retriever based on the Vector Store.
 
 ## Quick Install
 
 ```bash
-pip install langchain-
+pip install langchain-kinetica
 ```
 
 ## Documentation
 
-For conceptual guides, tutorials, and examples on using these classes, see the [
+For conceptual guides, tutorials, and examples on using these classes, see the [Kinetica Provider Docs](https://docs.langchain.com/oss/python/integrations/providers/kinetica).
 
 The documentation is also available in notebook format under `./notebooks`.
 
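The README changes above list the four capabilities of this release (chat model, vector store, document loader, retriever). The sketch below shows what the Text-to-SQL chat model looks like in user code; it is assembled from the fixture and chain code that appears later in this diff (the test module and the chat notebook) and is illustrative only. The `langchain_kinetica.chat_models` import path and the context name are assumptions, not confirmed from the package source.

```python
# Minimal sketch of the Text-to-SQL chat model described in the README.
# Assumes KINETICA_URL / KINETICA_USER / KINETICA_PASSWD are set in the
# environment, as the notebooks in this release require.
from gpudb import GPUdb
from langchain_core.prompts import ChatPromptTemplate

from langchain_kinetica.chat_models import ChatKinetica  # import path assumed

# Connect and wrap the connection in the chat model, mirroring the test fixture.
kdbc = GPUdb.get_connection()
llm = ChatKinetica(kdbc=kdbc)

# Load the SQL context registered in the database and append the user turn.
context_name = "demo.test_llm_ctx"  # hypothetical context name
ctx_messages = llm.load_messages_from_context(context_name)
ctx_messages.append(("human", "{input}"))
prompt = ChatPromptTemplate.from_messages(ctx_messages)

# The chain returns an AIMessage containing the generated SQL.
chain = prompt | llm
print(chain.invoke({"input": "How many users born after 1970 are there?"}))
```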
langchain_kinetica-1.2.0/notebooks/README.md

@@ -0,0 +1,37 @@
+# langchain-kinetica example notebooks
+
+These notebooks are used to generate documentation in the
+[langchain-docs](https://github.com/langchain-ai/docs) repository and will be
+published to the official [langchain docs](https://docs.langchain.com/oss/python/integrations/providers/kinetica).
+
+To update the documentation:
+
+1. Make changes to the notebooks and save the changes with the results.
+
+2. run `make docs` to generate the markdown files in `./md_docs`:
+```
+./md_docs/kinetica_provider.md
+./md_docs/kinetica_vectorstore.md
+./md_docs/kinetica_loader.md
+./md_docs/kinetica_chat.md
+./md_docs/kinetica_retriever.md
+```
+
+3. Clone the kinetica fork of [langchain-docs](https://github.com/kineticadb/langchain-docs)
+
+4. Checkout a new a feature branch in `langchain-docs`.
+
+5. Diff the markdown files with the associated files in `langchain-docs` and merge changes.
+```
+./src/oss/python/integrations/retrievers/kinetica.mdx
+./src/oss/python/integrations/chat/kinetica.mdx
+./src/oss/python/integrations/providers/kinetica.mdx
+./src/oss/python/integrations/vectorstores/kinetica.mdx
+./src/oss/python/integrations/document_loaders/kinetica.mdx
+```
+
+6. Follow the LangChain
+[contribution guidelines](https://docs.langchain.com/oss/python/contributing/documentation).
+(e.g. run `format` and `lint`)
+
+7. Commit changes and open a PR to merge the Kinetica fork to the LanChain repo.
{langchain_kinetica-1.1.0 → langchain_kinetica-1.2.0}/notebooks/kinetica_chat.ipynb

@@ -10,7 +10,8 @@
 },
 "source": [
 "---\n",
-"title: Kinetica
+"title: \"Kinetica language to SQL integration\"\n",
+"description: \"Integrate with the Kinetica language to SQL chat model using LangChain Python.\"\n",
 "---"
 ]
 },

@@ -60,22 +61,16 @@
 },
 {
 "cell_type": "code",
-"execution_count":
+"execution_count": null,
 "id": "f697712c",
-"metadata": {
-
-
-"name": "stdout",
-"output_type": "stream",
-"text": [
-"/Users/chadjuliano/git/langchain-kinetica/.venv/bin/python: No module named pip\n",
-"Note: you may need to restart the kernel to use updated packages.\n"
-]
+"metadata": {
+"vscode": {
+"languageId": "shellscript"
 }
-
+},
+"outputs": [],
 "source": [
-"
-"%pip install -qU langchain-kinetica, faker"
+"pip install -qU langchain-kinetica faker"
 ]
 },
 {

@@ -87,7 +82,7 @@
 "\n",
 "You must set the database connection in the following environment variables. If you are using a virtual environment you can set them in the `.env` file of the project:\n",
 "\n",
-"* `KINETICA_URL`: Database connection URL (e.g. http://localhost:9191)\n",
+"* `KINETICA_URL`: Database connection URL (e.g. `http://localhost:9191`)\n",
 "* `KINETICA_USER`: Database user\n",
 "* `KINETICA_PASSWD`: Secure password.\n",
 "\n",

@@ -96,7 +91,7 @@
 },
 {
 "cell_type": "code",
-"execution_count":
+"execution_count": 21,
 "id": "08bae670",
 "metadata": {},
 "outputs": [

@@ -104,7 +99,7 @@
 "name": "stderr",
 "output_type": "stream",
 "text": [
-"2026-02-
+"2026-02-05 09:39:43.431 INFO [GPUdb] Connected to Kinetica! (host=http://localhost:19191 api=7.2.3.4 server=7.2.3.5)\n"
 ]
 }
 ],

@@ -136,7 +131,7 @@
 },
 {
 "cell_type": "code",
-"execution_count":
+"execution_count": 22,
 "id": "f97853fa",
 "metadata": {},
 "outputs": [

@@ -162,11 +157,11 @@
 "\n",
 " birthdate \n",
 "id \n",
-"0 1999-08-
-"1 1926-04-
-"2 1935-08-
-"3 1990-07-
-"4 1932-
+"0 1999-08-25 \n",
+"1 1926-04-20 \n",
+"2 1935-08-22 \n",
+"3 1990-07-13 \n",
+"4 1932-12-03 \n"
 ]
 }
 ],

@@ -201,18 +196,10 @@
 },
 {
 "cell_type": "code",
-"execution_count":
+"execution_count": 23,
 "id": "7c08ff82",
 "metadata": {},
 "outputs": [
-{
-"name": "stderr",
-"output_type": "stream",
-"text": [
-"/Users/chadjuliano/git/langchain-kinetica/.venv/lib/python3.12/site-packages/tqdm/auto.py:21: TqdmWarning: IProgress not found. Please update jupyter and ipywidgets. See https://ipywidgets.readthedocs.io/en/stable/user_install.html\n",
-" from .autonotebook import tqdm as notebook_tqdm\n"
-]
-},
 {
 "name": "stdout",
 "output_type": "stream",

@@ -256,7 +243,7 @@
 },
 {
 "cell_type": "code",
-"execution_count":
+"execution_count": 24,
 "id": "a11ec3c0",
 "metadata": {},
 "outputs": [

@@ -270,9 +257,9 @@
 "),\n",
 "(\n",
 " SAMPLES = ( \n",
-" 'How many
+" 'How many users born after 1970 are there?' = 'select count(1) as num_users\n",
 " from demo.user_profiles\n",
-" where
+" where birthdate > ''1970-01-01'';' )\n",
 ")\n"
 ]
 },

@@ -282,7 +269,7 @@
 "1"
 ]
 },
-"execution_count":
+"execution_count": 24,
 "metadata": {},
 "output_type": "execute_result"
 }

@@ -295,11 +282,11 @@
 "samples_ctx = GPUdbSamplesClause(\n",
 " samples=[\n",
 " (\n",
-" \"How many
+" \"How many users born after 1970 are there?\",\n",
 " f\"\"\"\n",
 " select count(1) as num_users\n",
 " from {table_name}\n",
-" where
+" where birthdate > '1970-01-01';\n",
 " \"\"\",\n",
 " )\n",
 " ]\n",

@@ -330,7 +317,7 @@
 },
 {
 "cell_type": "code",
-"execution_count":
+"execution_count": 25,
 "id": "6b65b70b",
 "metadata": {},
 "outputs": [

@@ -338,7 +325,7 @@
 "name": "stdout",
 "output_type": "stream",
 "text": [
-"
+"================================ System Message ================================\n",
 "\n",
 "CREATE TABLE demo.user_profiles AS\n",
 "(\n",

@@ -351,19 +338,19 @@
 ");\n",
 "COMMENT ON TABLE demo.user_profiles IS 'Contains user profiles.';\n",
 "\n",
-"
+"================================ Human Message =================================\n",
 "\n",
-"How many
+"How many users born after 1970 are there?\n",
 "\n",
-"
+"================================== Ai Message ==================================\n",
 "\n",
 "select count(1) as num_users\n",
 " from demo.user_profiles\n",
-" where
+" where birthdate > '1970-01-01';\n",
 "\n",
-"
+"================================ Human Message =================================\n",
 "\n",
-"
+"{input}\n"
 ]
 }
 ],

@@ -378,7 +365,7 @@
 "\n",
 "# Create the prompt template.\n",
 "prompt_template = ChatPromptTemplate.from_messages(ctx_messages)\n",
-"prompt_template.
+"print(prompt_template.pretty_repr())"
 ]
 },
 {

@@ -393,7 +380,7 @@
 },
 {
 "cell_type": "code",
-"execution_count":
+"execution_count": 26,
 "id": "2f61013b",
 "metadata": {},
 "outputs": [],

@@ -418,7 +405,7 @@
 },
 {
 "cell_type": "code",
-"execution_count":
+"execution_count": 27,
 "id": "44e5177b",
 "metadata": {},
 "outputs": [

@@ -428,28 +415,27 @@
 "text": [
 "SQL: SELECT *\n",
 "FROM demo.user_profiles\n",
-"WHERE
-"
-"
-"
-"
-"
-"
-"4 carl19 Amanda Potts F \n",
+"WHERE birthdate > '1990-01-01';\n",
+" username name sex \\\n",
+"0 eduardo69 Haley Beck F \n",
+"1 melissa49 Wendy Reese F \n",
+"2 james26 Patricia Potter F \n",
+"3 mooreandrew Wendy Ramirez F \n",
+"4 melissabutler Alexa Kelly F \n",
 "\n",
 " address mail \\\n",
-"0
-"1
-"2
-"3
-"4
+"0 59836 Carla Causeway Suite 939\\nPort Eugene, I... meltondenise@yahoo.com \n",
+"1 6408 Christopher Hill Apt. 459\\nNew Benjamin, ... dadams@gmail.com \n",
+"2 7977 Jonathan Meadow\\nJerryside, OH 55205 jpatrick@gmail.com \n",
+"3 8089 Gonzalez Fields\\nJordanville, KS 22824 mathew05@hotmail.com \n",
+"4 1904 Burke Roads\\nPort Anne, DE 81252 douglas38@yahoo.com \n",
 "\n",
 " birthdate \n",
-"0
-"1
-"2
-"3
-"4
+"0 1999-08-25 \n",
+"1 1990-07-13 \n",
+"2 2010-03-21 \n",
+"3 2000-03-25 \n",
+"4 2023-02-01 \n"
 ]
 }
 ],

@@ -457,7 +443,7 @@
 "# Here you must ask a question relevant to the LLM context provided in the\n",
 "# prompt template.\n",
 "response: KineticaSqlResponse = chain.invoke(\n",
-" {\"input\": \"What
+" {\"input\": \"What users were born after 1990?\"}\n",
 ")\n",
 "\n",
 "print(f\"SQL: {response.sql}\")\n",

@@ -467,7 +453,7 @@
 ],
 "metadata": {
 "kernelspec": {
-"display_name": "langchain-kinetica (3.
+"display_name": "langchain-kinetica (3.13.9)",
 "language": "python",
 "name": "python3"
 },

@@ -481,7 +467,7 @@
 "name": "python",
 "nbconvert_exporter": "python",
 "pygments_lexer": "ipython3",
-"version": "3.
+"version": "3.13.9"
 }
 },
 "nbformat": 4,
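The chat notebook diff above re-runs the full pipeline: the prompt is built from a Kinetica SQL context, the `ChatKinetica` model generates SQL, and `KineticaSqlOutputParser` executes it. The sketch below condenses that pipeline into one helper. It is a sketch only: the import paths and the `.dataframe` attribute are inferred from the notebook output in this diff, not confirmed from the package source.

```python
# Sketch of the chain shown in kinetica_chat.ipynb:
# prompt -> ChatKinetica -> KineticaSqlOutputParser -> KineticaSqlResponse.
from langchain_core.prompts import ChatPromptTemplate

from langchain_kinetica.chat_models import (  # module path assumed
    ChatKinetica,
    KineticaSqlOutputParser,
    KineticaSqlResponse,
)


def build_sql_chain(llm: ChatKinetica, context_name: str):
    """Assemble the prompt from a Kinetica SQL context and execute the result."""
    ctx_messages = llm.load_messages_from_context(context_name)
    ctx_messages.append(("human", "{input}"))
    prompt = ChatPromptTemplate.from_messages(ctx_messages)
    # The output parser runs the generated SQL against the same connection.
    return prompt | llm | KineticaSqlOutputParser(kdbc=llm.kdbc)


# Usage, mirroring the notebook cell:
# chain = build_sql_chain(llm, "demo.test_llm_ctx")
# response: KineticaSqlResponse = chain.invoke({"input": "What users were born after 1990?"})
# print(response.sql)        # generated SQL
# print(response.dataframe)  # rows returned by executing it (attribute assumed)
```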
{langchain_kinetica-1.1.0 → langchain_kinetica-1.2.0}/notebooks/kinetica_loader.ipynb

@@ -10,7 +10,8 @@
 },
 "source": [
 "---\n",
-"title: Kinetica
+"title: \"Kinetica document loader integration\"\n",
+"description: \"Integrate with the Kinetica document loader using LangChain Python.\"\n",
 "---"
 ]
 },

@@ -25,11 +26,17 @@
 ]
 },
 {
-"cell_type": "
+"cell_type": "code",
+"execution_count": null,
 "id": "1259dbff",
-"metadata": {
+"metadata": {
+"vscode": {
+"languageId": "shellscript"
+}
+},
+"outputs": [],
 "source": [
-"
+"pip install -qU langchain-kinetica"
 ]
 },
 {

@@ -39,7 +46,7 @@
 "source": [
 "You must set the database connection in the following environment variables. If you are using a virtual environment you can set them in the `.env` file of the project:\n",
 "\n",
-"* `KINETICA_URL`: Database connection URL (e.g. http://localhost:9191)\n",
+"* `KINETICA_URL`: Database connection URL (e.g. `http://localhost:9191`)\n",
 "* `KINETICA_USER`: Database user\n",
 "* `KINETICA_PASSWD`: Secure password."
 ]

@@ -54,7 +61,7 @@
 "name": "stderr",
 "output_type": "stream",
 "text": [
-"2026-02-
+"2026-02-05 09:32:43.965 INFO [GPUdb] Connected to Kinetica! (host=http://localhost:19191 api=7.2.3.4 server=7.2.3.5)\n"
 ]
 }
 ],

@@ -123,7 +130,7 @@
 ],
 "metadata": {
 "kernelspec": {
-"display_name": "langchain-kinetica (3.
+"display_name": "langchain-kinetica (3.13.9)",
 "language": "python",
 "name": "python3"
 },

@@ -137,7 +144,7 @@
 "name": "python",
 "nbconvert_exporter": "python",
 "pygments_lexer": "ipython3",
-"version": "3.
+"version": "3.13.9"
 }
 },
 "nbformat": 4,
{langchain_kinetica-1.1.0 → langchain_kinetica-1.2.0}/notebooks/kinetica_provider.ipynb

@@ -3,10 +3,15 @@
 {
 "cell_type": "raw",
 "id": "a9391f38",
-"metadata": {
+"metadata": {
+"vscode": {
+"languageId": "raw"
+}
+},
 "source": [
 "---\n",
-"title: Kinetica\n",
+"title: \"Kinetica integrations\"\n",
+"description: \"Integrate with Kinetica using LangChain Python.\"\n",
 "---"
 ]
 },

@@ -29,7 +34,7 @@
 },
 {
 "cell_type": "code",
-"execution_count":
+"execution_count": 1,
 "id": "8921fe1a",
 "metadata": {},
 "outputs": [],

@@ -52,7 +57,7 @@
 },
 {
 "cell_type": "code",
-"execution_count":
+"execution_count": 2,
 "id": "b4343663",
 "metadata": {},
 "outputs": [],

@@ -75,7 +80,7 @@
 },
 {
 "cell_type": "code",
-"execution_count":
+"execution_count": 3,
 "id": "ea66a843",
 "metadata": {},
 "outputs": [],

@@ -98,7 +103,7 @@
 ],
 "metadata": {
 "kernelspec": {
-"display_name": "langchain-kinetica (3.
+"display_name": "langchain-kinetica (3.13.9)",
 "language": "python",
 "name": "python3"
 },

@@ -112,7 +117,7 @@
 "name": "python",
 "nbconvert_exporter": "python",
 "pygments_lexer": "ipython3",
-"version": "3.
+"version": "3.13.9"
 }
 },
 "nbformat": 4,
{langchain_kinetica-1.1.0 → langchain_kinetica-1.2.0}/notebooks/kinetica_retriever.ipynb

@@ -9,10 +9,15 @@
 {
 "cell_type": "raw",
 "id": "b51c964b",
-"metadata": {
+"metadata": {
+"vscode": {
+"languageId": "raw"
+}
+},
 "source": [
 "---\n",
-"title: Kinetica
+"title: \"Kinetica vectorstore based retriever integration\"\n",
+"description: \"Integrate with the Kinetica vectorstore based retriever using LangChain Python.\"\n",
 "---"
 ]
 },

@@ -35,21 +40,16 @@
 },
 {
 "cell_type": "code",
-"execution_count":
+"execution_count": null,
 "id": "d84650c5",
-"metadata": {
-
-
-"name": "stdout",
-"output_type": "stream",
-"text": [
-"/Users/chadjuliano/git/langchain-kinetica/.venv/bin/python: No module named pip\n",
-"Note: you may need to restart the kernel to use updated packages.\n"
-]
+"metadata": {
+"vscode": {
+"languageId": "shellscript"
 }
-
+},
+"outputs": [],
 "source": [
-"
+"pip install -qU langchain-kinetica langchain-community langchain-openai"
 ]
 },
 {

@@ -62,7 +62,7 @@
 },
 {
 "cell_type": "code",
-"execution_count":
+"execution_count": 1,
 "id": "150f0c9a",
 "metadata": {},
 "outputs": [],

@@ -85,14 +85,14 @@
 "source": [
 "You must set the database connection in the following environment variables. If you are using a virtual environment you can set them in the `.env` file of the project:\n",
 "\n",
-"* `KINETICA_URL`: Database connection URL (e.g. http://localhost:9191)\n",
+"* `KINETICA_URL`: Database connection URL (e.g. `http://localhost:9191`)\n",
 "* `KINETICA_USER`: Database user\n",
 "* `KINETICA_PASSWD`: Secure password."
 ]
 },
 {
 "cell_type": "code",
-"execution_count":
+"execution_count": 2,
 "id": "0b031386",
 "metadata": {},
 "outputs": [

@@ -100,16 +100,16 @@
 "name": "stderr",
 "output_type": "stream",
 "text": [
-"2026-02-
+"2026-02-05 09:33:25.278 INFO [GPUdb] Connected to Kinetica! (host=http://localhost:19191 api=7.2.3.4 server=7.2.3.5)\n"
 ]
 },
 {
 "data": {
 "text/plain": [
-"KineticaSettings(kdbc=<gpudb.gpudb.GPUdb object at
+"KineticaSettings(kdbc=<gpudb.gpudb.GPUdb object at 0x1182c6120>, database='langchain', table='langchain_kinetica_embeddings', metric='l2')"
 ]
 },
-"execution_count":
+"execution_count": 2,
 "metadata": {},
 "output_type": "execute_result"
 }

@@ -135,7 +135,7 @@
 },
 {
 "cell_type": "code",
-"execution_count":
+"execution_count": 3,
 "id": "0cd0ac4d",
 "metadata": {},
 "outputs": [],

@@ -173,7 +173,7 @@
 },
 {
 "cell_type": "code",
-"execution_count":
+"execution_count": 4,
 "id": "4f92e57f",
 "metadata": {},
 "outputs": [

@@ -205,7 +205,7 @@
 ],
 "metadata": {
 "kernelspec": {
-"display_name": "langchain-kinetica (3.
+"display_name": "langchain-kinetica (3.13.9)",
 "language": "python",
 "name": "python3"
 },

@@ -219,7 +219,7 @@
 "name": "python",
 "nbconvert_exporter": "python",
 "pygments_lexer": "ipython3",
-"version": "3.
+"version": "3.13.9"
 }
 },
 "nbformat": 4,
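The retriever notebook's new outputs show a `VectorStoreRetriever` wrapped around the `KineticaVectorstore` with `OpenAIEmbeddings`. The short sketch below isolates only the retriever step using the standard LangChain `as_retriever()` API; construction of the vector store itself (embeddings, `KineticaSettings`, table name) is omitted here and the query string is purely illustrative.

```python
# Sketch: wrap any constructed LangChain vector store as a retriever,
# as the kinetica_retriever.ipynb output above implies.
from langchain_core.vectorstores import VectorStoreRetriever


def make_retriever(vectorstore) -> VectorStoreRetriever:
    # k controls how many documents are returned per query.
    return vectorstore.as_retriever(search_kwargs={"k": 4})


# docs = make_retriever(vectorstore).invoke("example query")
```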
{langchain_kinetica-1.1.0 → langchain_kinetica-1.2.0}/notebooks/kinetica_vectorstore.ipynb

@@ -3,10 +3,15 @@
 {
 "cell_type": "raw",
 "id": "c602036b",
-"metadata": {
+"metadata": {
+"vscode": {
+"languageId": "raw"
+}
+},
 "source": [
 "---\n",
-"title: Kinetica
+"title: \"Kinetica vectorstore integration\"\n",
+"description: \"Integrate with the Kinetica VectorStore API vector store using LangChain Python.\"\n",
 "---"
 ]
 },

@@ -29,22 +34,16 @@
 },
 {
 "cell_type": "code",
-"execution_count":
+"execution_count": null,
 "id": "7789c336",
-"metadata": {
-
-
-"name": "stdout",
-"output_type": "stream",
-"text": [
-"/Users/chadjuliano/git/langchain-kinetica/.venv/bin/python: No module named pip\n",
-"Note: you may need to restart the kernel to use updated packages.\n"
-]
+"metadata": {
+"vscode": {
+"languageId": "shellscript"
 }
-
+},
+"outputs": [],
 "source": [
-"
-"%pip install -qU langchain-kinetica"
+"pip install -qU langchain-kinetica"
 ]
 },
 {

@@ -57,7 +56,7 @@
 },
 {
 "cell_type": "code",
-"execution_count":
+"execution_count": 1,
 "id": "cbfa07aa",
 "metadata": {},
 "outputs": [],

@@ -80,14 +79,14 @@
 "source": [
 "You must set the database connection in the following environment variables. If you are using a virtual environment you can set them in the `.env` file of the project:\n",
 "\n",
-"* `KINETICA_URL`: Database connection URL (e.g. http://localhost:9191)\n",
+"* `KINETICA_URL`: Database connection URL (e.g. `http://localhost:9191`)\n",
 "* `KINETICA_USER`: Database user\n",
 "* `KINETICA_PASSWD`: Secure password."
 ]
 },
 {
 "cell_type": "code",
-"execution_count":
+"execution_count": 2,
 "id": "9863e83c",
 "metadata": {},
 "outputs": [

@@ -95,16 +94,16 @@
 "name": "stderr",
 "output_type": "stream",
 "text": [
-"2026-02-
+"2026-02-05 09:33:45.777 INFO [GPUdb] Connected to Kinetica! (host=http://localhost:19191 api=7.2.3.4 server=7.2.3.5)\n"
 ]
 },
 {
 "data": {
 "text/plain": [
-"KineticaSettings(kdbc=<gpudb.gpudb.GPUdb object at
+"KineticaSettings(kdbc=<gpudb.gpudb.GPUdb object at 0x121fb7a10>, database='langchain', table='langchain_kinetica_embeddings', metric='l2')"
 ]
 },
-"execution_count":
+"execution_count": 2,
 "metadata": {},
 "output_type": "execute_result"
 }

@@ -123,26 +122,26 @@
 },
 {
 "cell_type": "code",
-"execution_count":
+"execution_count": 3,
 "id": "27c76b26",
 "metadata": {},
 "outputs": [
 {
 "data": {
 "text/plain": [
-"['
-" '
-" '
-" '
-" '
-" '
-" '
-" '
-" '
-" '
+"['2f95b73e-4f3e-4b13-9727-37a03476b5ea',\n",
+" 'a4c95843-2ac0-4553-93c6-8b4ae5c7b4f8',\n",
+" '7bebf4de-b4d5-4808-a671-2dea138e5752',\n",
+" 'cf105eae-0e2f-48b7-b0b0-583df82354ea',\n",
+" '38241a48-2e6c-4ac7-807f-96783f236981',\n",
+" 'db9e5201-b56c-4df9-b523-b74177394c1b',\n",
+" 'f855de29-eb5b-48c6-a215-1fea14d71f33',\n",
+" '2bf1dd87-00cc-454b-94fb-15556b327770',\n",
+" '3f86fac1-ef1c-42e9-b3b5-3d98582b20bb',\n",
+" 'd59f71d8-58c3-4e6b-9960-907b654249d5']"
 ]
 },
-"execution_count":
+"execution_count": 3,
 "metadata": {},
 "output_type": "execute_result"
 }

@@ -235,7 +234,7 @@
 },
 {
 "cell_type": "code",
-"execution_count":
+"execution_count": 4,
 "id": "6b80f089",
 "metadata": {},
 "outputs": [

@@ -249,7 +248,7 @@
 "* LangGraph is the best framework for building stateful, agentic applications! [{'source': 'tweet'}]\n",
 "\n",
 "Similarity search with score\n",
-"* [SIM=0.
+"* [SIM=0.945382] The weather forecast for tomorrow is cloudy and overcast, with a high of 62 degrees. [{'source': 'news'}]\n"
 ]
 }
 ],

@@ -299,7 +298,7 @@
 },
 {
 "cell_type": "code",
-"execution_count":
+"execution_count": 5,
 "id": "74d5f1f8",
 "metadata": {},
 "outputs": [

@@ -307,8 +306,8 @@
 "name": "stdout",
 "output_type": "stream",
 "text": [
-"First result: (Document(metadata={}, page_content='foo'), 0.
-"Second result: (Document(metadata={'source': 'tweet'}, page_content='Building an exciting new project with LangChain - come check it out!'), 1.
+"First result: (Document(metadata={}, page_content='foo'), 0.0)\n",
+"Second result: (Document(metadata={'source': 'tweet'}, page_content='Building an exciting new project with LangChain - come check it out!'), 1.2609258890151978)\n"
 ]
 }
 ],

@@ -340,7 +339,7 @@
 },
 {
 "cell_type": "code",
-"execution_count":
+"execution_count": 6,
 "id": "335bae64",
 "metadata": {},
 "outputs": [

@@ -348,10 +347,10 @@
 "data": {
 "text/plain": [
 "(Document(metadata={'source': 'tweet'}, page_content='Building an exciting new project with LangChain - come check it out!'),\n",
-" 1.
+" 1.2609500885009766)"
 ]
 },
-"execution_count":
+"execution_count": 6,
 "metadata": {},
 "output_type": "execute_result"
 }

@@ -379,17 +378,17 @@
 },
 {
 "cell_type": "code",
-"execution_count":
+"execution_count": 7,
 "id": "f1519bba",
 "metadata": {},
 "outputs": [
 {
 "data": {
 "text/plain": [
-"VectorStoreRetriever(tags=['KineticaVectorstore', 'OpenAIEmbeddings'], vectorstore=<langchain_kinetica.vectorstores.KineticaVectorstore object at
+"VectorStoreRetriever(tags=['KineticaVectorstore', 'OpenAIEmbeddings'], vectorstore=<langchain_kinetica.vectorstores.KineticaVectorstore object at 0x12268c050>, search_kwargs={})"
 ]
 },
-"execution_count":
+"execution_count": 7,
 "metadata": {},
 "output_type": "execute_result"
 }

@@ -404,7 +403,7 @@
 ],
 "metadata": {
 "kernelspec": {
-"display_name": "langchain-kinetica (3.
+"display_name": "langchain-kinetica (3.13.9)",
 "language": "python",
 "name": "python3"
 },

@@ -418,7 +417,7 @@
 "name": "python",
 "nbconvert_exporter": "python",
 "pygments_lexer": "ipython3",
-"version": "3.
+"version": "3.13.9"
 }
 },
 "nbformat": 4,
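The vector store notebook's re-run outputs above show the two operations that changed most visibly: `add_texts` now returns a list of UUID strings, and `similarity_search_with_score` returns `(Document, score)` pairs ranked by L2 distance. The sketch below restates those standard LangChain vector-store calls against an already-constructed `vectorstore`; the constructor for `KineticaVectorstore` is not shown in this diff, so its signature is deliberately left out.

```python
# Sketch of the operations whose outputs appear in kinetica_vectorstore.ipynb.
from langchain_core.documents import Document


def demo_vectorstore_ops(vectorstore) -> None:
    # Insert a text with metadata; the returned ids are UUID strings.
    ids = vectorstore.add_texts(
        texts=["Building an exciting new project with LangChain - come check it out!"],
        metadatas=[{"source": "tweet"}],
    )
    print(ids)

    # Search with scores; with metric='l2' (as in the notebook's
    # KineticaSettings) a lower score means a closer match.
    results: list[tuple[Document, float]] = vectorstore.similarity_search_with_score(
        "weather forecast", k=1
    )
    for doc, score in results:
        print(f"[SIM={score:.6f}] {doc.page_content} [{doc.metadata}]")
```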
{langchain_kinetica-1.1.0 → langchain_kinetica-1.2.0}/pyproject.toml

@@ -10,7 +10,7 @@ readme = "README.md"
 authors = [
     {name = "Chad Juliano", email = "cjuliano@kinetica.com"},
 ]
-version = "1.1.0"
+version = "1.2.0"
 requires-python = ">=3.10.0,<4.0.0"
 dependencies = [
     "langchain-core>=1.1.0,<2.0.0",

@@ -37,6 +37,7 @@ test = [
 ]
 jupyter = [
     "ipykernel>=7.1.0",
+    "ipywidgets>=8.1.8",
     "nbconvert>=7.17.0",
 ]
 lint = ["ruff>=0.13.1,<0.14.0"]
{langchain_kinetica-1.1.0 → langchain_kinetica-1.2.0}/tests/integration_tests/test_chat_models.py
RENAMED

@@ -5,6 +5,7 @@ from collections.abc import Generator
 
 import faker
 import pandas as pd
+import pytest
 from gpudb import GPUdb, GPUdbTable
 from langchain_core.messages import (
     AIMessage,

@@ -25,6 +26,13 @@ LOG = logging.getLogger(__name__)
 SCHEMA_NAME = "langchain_test"
 
 
+@pytest.fixture(scope="module")
+def chat_kinetica() -> ChatKinetica:
+    """Fixture to create a `ChatKinetica` instance for the tests."""
+    kdbc = GPUdb.get_connection()
+    return ChatKinetica(kdbc=kdbc)  # type: ignore[call-arg]
+
+
 class TestChatKinetica:
     """Integration tests for `Kinetica` chat models.
 

@@ -35,12 +43,6 @@ class TestChatKinetica:
     export KINETICA_PASSWORD="xxx"
     ```
 
-    You must have `gpudb[dataframe]` and `faker` packages installed to run these
-    tests. Install them with:
-    ```
-    uv pip install 'gpudb[dataframe]' faker
-    ```
-
     For more information see https://docs.kinetica.com/7.1/sql-gpt/concepts/.
 
     These integration tests follow a workflow:

@@ -60,30 +62,30 @@ class TestChatKinetica:
     context_name = f"{SCHEMA_NAME}.test_llm_ctx"
     num_records = 100
 
-    @classmethod
     # @pytest.mark.vcr()
-    def test_setup(
+    def test_setup(self, chat_kinetica: ChatKinetica) -> None:
         """Create the connection, test table, and LLM context."""
-
-
-
+        self._create_test_table(
+            kinetica_dbc=chat_kinetica.kdbc,
+            table_name=self.table_name,
+            num_records=self.num_records,
+        )
+        self._create_llm_context(
+            kinetica_dbc=chat_kinetica.kdbc, context_name=self.context_name
         )
-        cls._create_llm_context(kinetica_dbc=kdbc, context_name=cls.context_name)
 
     # @pytest.mark.vcr()
-    def test_create_llm(self) -> None:
+    def test_create_llm(self, chat_kinetica: ChatKinetica) -> None:
         """Create an LLM instance."""
-
-        LOG.info(kinetica_llm._identifying_params)
+        LOG.info(chat_kinetica._identifying_params)
 
-        assert isinstance(
-        assert
+        assert isinstance(chat_kinetica.kdbc, GPUdb)
+        assert chat_kinetica._llm_type == "kinetica-sqlassist"
 
     # @pytest.mark.vcr()
-    def test_load_context(self) -> None:
+    def test_load_context(self, chat_kinetica: ChatKinetica) -> None:
         """Load the LLM context from the DB."""
-
-        ctx_messages = kinetica_llm.load_messages_from_context(self.context_name)
+        ctx_messages = chat_kinetica.load_messages_from_context(self.context_name)
 
         system_message = ctx_messages[0]
         assert isinstance(system_message, SystemMessage)

@@ -93,15 +95,13 @@ class TestChatKinetica:
         assert last_question.content == "How many male users are there?"
 
     # @pytest.mark.vcr()
-    def test_generate(self) -> None:
+    def test_generate(self, chat_kinetica: ChatKinetica) -> None:
         """Generate SQL from a chain."""
-        kinetica_llm = ChatKinetica()  # type: ignore[call-arg]
-
         # create chain
-        ctx_messages =
+        ctx_messages = chat_kinetica.load_messages_from_context(self.context_name)
         ctx_messages.append(("human", "{input}"))
         prompt_template = ChatPromptTemplate.from_messages(ctx_messages)
-        chain = prompt_template |
+        chain = prompt_template | chat_kinetica
 
         resp_message = chain.invoke(
             {"input": "What are the female users ordered by username?"}

@@ -110,18 +110,16 @@ class TestChatKinetica:
         assert isinstance(resp_message, AIMessage)
 
     # @pytest.mark.vcr()
-    def test_full_chain(self) -> None:
+    def test_full_chain(self, chat_kinetica: ChatKinetica) -> None:
         """Generate SQL from a chain and execute the query."""
-        kinetica_llm = ChatKinetica()  # type: ignore[call-arg]
-
         # create chain
-        ctx_messages =
+        ctx_messages = chat_kinetica.load_messages_from_context(self.context_name)
         ctx_messages.append(("human", "{input}"))
         prompt_template = ChatPromptTemplate.from_messages(ctx_messages)
         chain = (
             prompt_template
-
-            | KineticaSqlOutputParser(kdbc=
+            | chat_kinetica
+            | KineticaSqlOutputParser(kdbc=chat_kinetica.kdbc)
         )
         sql_response: KineticaSqlResponse = chain.invoke(
             {"input": "What are the female users ordered by username?"}
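The substantive change in this test module is structural: each test previously built its own `ChatKinetica()` and connection, and the new version injects one shared instance through a module-scoped pytest fixture. The sketch below isolates that pytest pattern with a hypothetical `FakeClient` standing in for the expensive GPUdb connection; it is not part of the package, only an illustration of the fixture scope being used.

```python
# Sketch of the module-scoped fixture pattern adopted in test_chat_models.py.
import pytest


class FakeClient:
    """Hypothetical stand-in for an expensive resource such as a GPUdb connection."""

    def __init__(self) -> None:
        self.calls = 0


@pytest.fixture(scope="module")
def client() -> FakeClient:
    # Built once per test module, then injected into every test that names it.
    return FakeClient()


def test_first(client: FakeClient) -> None:
    client.calls += 1
    assert client.calls == 1


def test_second(client: FakeClient) -> None:
    # Same instance as in test_first because of scope="module"
    # (pytest runs the tests in file order by default).
    client.calls += 1
    assert client.calls == 2
```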
{langchain_kinetica-1.1.0 → langchain_kinetica-1.2.0}/uv.lock

@@ -961,6 +961,23 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/d9/33/1f075bf72b0b747cb3288d011319aaf64083cf2efef8354174e3ed4540e2/ipython_pygments_lexers-1.1.1-py3-none-any.whl", hash = "sha256:a9462224a505ade19a605f71f8fa63c2048833ce50abc86768a0d81d876dc81c", size = 8074, upload-time = "2025-01-17T11:24:33.271Z" },
 ]
 
+[[package]]
+name = "ipywidgets"
+version = "8.1.8"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "comm" },
+    { name = "ipython", version = "8.38.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" },
+    { name = "ipython", version = "9.10.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" },
+    { name = "jupyterlab-widgets" },
+    { name = "traitlets" },
+    { name = "widgetsnbextension" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/4c/ae/c5ce1edc1afe042eadb445e95b0671b03cee61895264357956e61c0d2ac0/ipywidgets-8.1.8.tar.gz", hash = "sha256:61f969306b95f85fba6b6986b7fe45d73124d1d9e3023a8068710d47a22ea668", size = 116739, upload-time = "2025-11-01T21:18:12.393Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/56/6d/0d9848617b9f753b87f214f1c682592f7ca42de085f564352f10f0843026/ipywidgets-8.1.8-py3-none-any.whl", hash = "sha256:ecaca67aed704a338f88f67b1181b58f821ab5dc89c1f0f5ef99db43c1c2921e", size = 139808, upload-time = "2025-11-01T21:18:10.956Z" },
+]
+
 [[package]]
 name = "jedi"
 version = "0.19.2"

@@ -1168,6 +1185,15 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/b1/dd/ead9d8ea85bf202d90cc513b533f9c363121c7792674f78e0d8a854b63b4/jupyterlab_pygments-0.3.0-py3-none-any.whl", hash = "sha256:841a89020971da1d8693f1a99997aefc5dc424bb1b251fd6322462a1b8842780", size = 15884, upload-time = "2023-11-23T09:26:34.325Z" },
 ]
 
+[[package]]
+name = "jupyterlab-widgets"
+version = "3.0.16"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/26/2d/ef58fed122b268c69c0aa099da20bc67657cdfb2e222688d5731bd5b971d/jupyterlab_widgets-3.0.16.tar.gz", hash = "sha256:423da05071d55cf27a9e602216d35a3a65a3e41cdf9c5d3b643b814ce38c19e0", size = 897423, upload-time = "2025-11-01T21:11:29.724Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/ab/b5/36c712098e6191d1b4e349304ef73a8d06aed77e56ceaac8c0a306c7bda1/jupyterlab_widgets-3.0.16-py3-none-any.whl", hash = "sha256:45fa36d9c6422cf2559198e4db481aa243c7a32d9926b500781c830c80f7ecf8", size = 914926, upload-time = "2025-11-01T21:11:28.008Z" },
+]
+
 [[package]]
 name = "langchain-classic"
 version = "1.0.1"

@@ -1232,7 +1258,7 @@ wheels = [
 
 [[package]]
 name = "langchain-kinetica"
-version = "1.1.0"
+version = "1.2.0"
 source = { editable = "." }
 dependencies = [
     { name = "gpudb", extra = ["dataframe"] },

@@ -1246,6 +1272,7 @@ dev = [
     { name = "build" },
     { name = "faker" },
     { name = "ipykernel" },
+    { name = "ipywidgets" },
     { name = "langchain-community" },
     { name = "langchain-openai" },
     { name = "langchain-text-splitters" },

@@ -1258,6 +1285,7 @@ dev = [
 ]
 jupyter = [
     { name = "ipykernel" },
+    { name = "ipywidgets" },
     { name = "nbconvert" },
 ]
 lint = [

@@ -1290,6 +1318,7 @@ dev = [
     { name = "build", specifier = ">=1.4.0" },
     { name = "faker", specifier = ">=40.1.2" },
     { name = "ipykernel", specifier = ">=7.1.0" },
+    { name = "ipywidgets", specifier = ">=8.1.8" },
     { name = "langchain-community", specifier = ">=0.4.1" },
     { name = "langchain-openai", specifier = ">=1.1.7" },
     { name = "langchain-text-splitters", specifier = ">=1.1.0" },

@@ -1302,6 +1331,7 @@ dev = [
 ]
 jupyter = [
     { name = "ipykernel", specifier = ">=7.1.0" },
+    { name = "ipywidgets", specifier = ">=8.1.8" },
     { name = "nbconvert", specifier = ">=7.17.0" },
 ]
 lint = [{ name = "ruff", specifier = ">=0.13.1,<0.14.0" }]

@@ -3448,6 +3478,15 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/f4/24/2a3e3df732393fed8b3ebf2ec078f05546de641fe1b667ee316ec1dcf3b7/webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78", size = 11774, upload-time = "2017-04-05T20:21:32.581Z" },
 ]
 
+[[package]]
+name = "widgetsnbextension"
+version = "4.0.15"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/bd/f4/c67440c7fb409a71b7404b7aefcd7569a9c0d6bd071299bf4198ae7a5d95/widgetsnbextension-4.0.15.tar.gz", hash = "sha256:de8610639996f1567952d763a5a41af8af37f2575a41f9852a38f947eb82a3b9", size = 1097402, upload-time = "2025-11-01T21:15:55.178Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/3f/0e/fa3b193432cfc60c93b42f3be03365f5f909d2b3ea410295cf36df739e31/widgetsnbextension-4.0.15-py3-none-any.whl", hash = "sha256:8156704e4346a571d9ce73b84bee86a29906c9abfd7223b7228a28899ccf3366", size = 2196503, upload-time = "2025-11-01T21:15:53.565Z" },
+]
+
 [[package]]
 name = "xxhash"
 version = "3.6.0"
All remaining files listed above are unchanged between 1.1.0 and 1.2.0; they are only renamed from the langchain_kinetica-1.1.0 directory prefix to langchain_kinetica-1.2.0.