cognee 0.3.9__py3-none-any.whl → 0.4.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- cognee/infrastructure/databases/vector/embeddings/OllamaEmbeddingEngine.py +4 -1
- cognee/modules/retrieval/cypher_search_retriever.py +3 -1
- cognee/tests/integration/web_url_crawler/test_default_url_crawler.py +1 -1
- cognee/tests/integration/web_url_crawler/test_tavily_crawler.py +1 -1
- cognee/tests/integration/web_url_crawler/test_url_adding_e2e.py +13 -27
- {cognee-0.3.9.dist-info → cognee-0.4.1.dist-info}/METADATA +71 -86
- {cognee-0.3.9.dist-info → cognee-0.4.1.dist-info}/RECORD +11 -11
- {cognee-0.3.9.dist-info → cognee-0.4.1.dist-info}/WHEEL +1 -1
- {cognee-0.3.9.dist-info → cognee-0.4.1.dist-info}/entry_points.txt +0 -0
- {cognee-0.3.9.dist-info → cognee-0.4.1.dist-info}/licenses/LICENSE +0 -0
- {cognee-0.3.9.dist-info → cognee-0.4.1.dist-info}/licenses/NOTICE.md +0 -0
cognee/infrastructure/databases/vector/embeddings/OllamaEmbeddingEngine.py

@@ -124,7 +124,10 @@ class OllamaEmbeddingEngine(EmbeddingEngine):
                 self.endpoint, json=payload, headers=headers, timeout=60.0
             ) as response:
                 data = await response.json()
-
+                if "embeddings" in data:
+                    return data["embeddings"][0]
+                else:
+                    return data["data"][0]["embedding"]

     def get_vector_size(self) -> int:
         """
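The added branch makes the engine tolerant of two response shapes. As a rough illustration (assuming Ollama's native `/api/embed` endpoint returns an `embeddings` list while OpenAI-compatible embedding endpoints return a `data[0].embedding` structure; the helper below is hypothetical and not part of the package):

```python
# Illustrative sketch only: the two response shapes the changed code appears to handle.
# Assumption: Ollama's native /api/embed returns {"embeddings": [[...]]},
# while OpenAI-compatible embedding endpoints return {"data": [{"embedding": [...]}]}.

def extract_embedding(data: dict) -> list:
    """Return the first embedding vector from either response format."""
    if "embeddings" in data:
        return data["embeddings"][0]
    return data["data"][0]["embedding"]


ollama_style = {"embeddings": [[0.1, 0.2, 0.3]]}
openai_style = {"data": [{"embedding": [0.1, 0.2, 0.3]}]}

assert extract_embedding(ollama_style) == extract_embedding(openai_style)
```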
cognee/modules/retrieval/cypher_search_retriever.py

@@ -1,4 +1,6 @@
 from typing import Any, Optional
+from fastapi.encoders import jsonable_encoder
+
 from cognee.infrastructure.databases.graph import get_graph_engine
 from cognee.modules.retrieval.base_retriever import BaseRetriever
 from cognee.modules.retrieval.utils.completion import generate_completion

@@ -50,7 +52,7 @@ class CypherSearchRetriever(BaseRetriever):
                 logger.warning("Search attempt on an empty knowledge graph")
                 return []

-            result = await graph_engine.query(query)
+            result = jsonable_encoder(await graph_engine.query(query))
         except Exception as e:
             logger.error("Failed to execture cypher search retrieval: %s", str(e))
             raise CypherSearchError() from e
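Wrapping the raw query result in `jsonable_encoder` likely exists because a Cypher query can return values that are not directly JSON-serializable, such as UUIDs or datetimes. A minimal sketch with a made-up result row (the sample data is hypothetical, not the retriever's actual output):

```python
# Illustrative sketch only: jsonable_encoder applied to a hypothetical graph query result.
from datetime import datetime, timezone
from uuid import uuid4

from fastapi.encoders import jsonable_encoder

raw_result = [{"id": uuid4(), "created_at": datetime.now(timezone.utc), "name": "Document"}]

# UUID and datetime values are converted to JSON-friendly strings, so the result
# can be serialized with json.dumps or returned from an API endpoint without errors.
encoded = jsonable_encoder(raw_result)
print(encoded)
```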
cognee/tests/integration/web_url_crawler/test_default_url_crawler.py

@@ -5,7 +5,7 @@ from cognee.tasks.web_scraper import DefaultUrlCrawler
 @pytest.mark.asyncio
 async def test_fetch():
     crawler = DefaultUrlCrawler()
-    url = "
+    url = "http://example.com/"
     results = await crawler.fetch_urls(url)
     assert len(results) == 1
     assert isinstance(results, dict)
cognee/tests/integration/web_url_crawler/test_tavily_crawler.py

@@ -11,7 +11,7 @@ skip_in_ci = pytest.mark.skipif(
 @skip_in_ci
 @pytest.mark.asyncio
 async def test_fetch():
-    url = "
+    url = "http://example.com/"
     results = await fetch_with_tavily(url)
     assert isinstance(results, dict)
     assert len(results) == 1
cognee/tests/integration/web_url_crawler/test_url_adding_e2e.py

@@ -14,9 +14,7 @@ async def test_url_saves_as_html_file():
     await cognee.prune.prune_system(metadata=True)

     try:
-        original_file_path = await save_data_item_to_storage(
-            "https://en.wikipedia.org/wiki/Large_language_model"
-        )
+        original_file_path = await save_data_item_to_storage("http://example.com/")
         file_path = get_data_file_path(original_file_path)
         assert file_path.endswith(".html")
         file = Path(file_path)

@@ -44,9 +42,7 @@ async def test_saved_html_is_valid():
     await cognee.prune.prune_system(metadata=True)

     try:
-        original_file_path = await save_data_item_to_storage(
-            "https://en.wikipedia.org/wiki/Large_language_model"
-        )
+        original_file_path = await save_data_item_to_storage("http://example.com/")
         file_path = get_data_file_path(original_file_path)
         content = Path(file_path).read_text()

@@ -72,7 +68,7 @@ async def test_add_url():
     await cognee.prune.prune_data()
     await cognee.prune.prune_system(metadata=True)

-    await cognee.add("
+    await cognee.add("http://example.com/")


 skip_in_ci = pytest.mark.skipif(

@@ -88,7 +84,7 @@ async def test_add_url_with_tavily():
     await cognee.prune.prune_data()
     await cognee.prune.prune_system(metadata=True)

-    await cognee.add("
+    await cognee.add("http://example.com/")


 @pytest.mark.asyncio

@@ -98,7 +94,7 @@ async def test_add_url_without_incremental_loading():

     try:
         await cognee.add(
-            "
+            "http://example.com/",
             incremental_loading=False,
         )
     except Exception as e:

@@ -112,7 +108,7 @@ async def test_add_url_with_incremental_loading():

     try:
         await cognee.add(
-            "
+            "http://example.com/",
             incremental_loading=True,
         )
     except Exception as e:

@@ -125,7 +121,7 @@ async def test_add_url_can_define_preferred_loader_as_list_of_str():
     await cognee.prune.prune_system(metadata=True)

     await cognee.add(
-        "
+        "http://example.com/",
         preferred_loaders=["beautiful_soup_loader"],
     )

@@ -144,7 +140,7 @@ async def test_add_url_with_extraction_rules():

     try:
         await cognee.add(
-            "
+            "http://example.com/",
             preferred_loaders={"beautiful_soup_loader": {"extraction_rules": extraction_rules}},
         )
     except Exception as e:

@@ -163,9 +159,7 @@ async def test_loader_is_none_by_default():
     }

     try:
-        original_file_path = await save_data_item_to_storage(
-            "https://en.wikipedia.org/wiki/Large_language_model"
-        )
+        original_file_path = await save_data_item_to_storage("http://example.com/")
         file_path = get_data_file_path(original_file_path)
         assert file_path.endswith(".html")
         file = Path(file_path)

@@ -196,9 +190,7 @@ async def test_beautiful_soup_loader_is_selected_loader_if_preferred_loader_prov
     }

     try:
-        original_file_path = await save_data_item_to_storage(
-            "https://en.wikipedia.org/wiki/Large_language_model"
-        )
+        original_file_path = await save_data_item_to_storage("http://example.com/")
         file_path = get_data_file_path(original_file_path)
         assert file_path.endswith(".html")
         file = Path(file_path)

@@ -225,9 +217,7 @@ async def test_beautiful_soup_loader_works_with_and_without_arguments():
     await cognee.prune.prune_system(metadata=True)

     try:
-        original_file_path = await save_data_item_to_storage(
-            "https://en.wikipedia.org/wiki/Large_language_model"
-        )
+        original_file_path = await save_data_item_to_storage("http://example.com/")
         file_path = get_data_file_path(original_file_path)
         assert file_path.endswith(".html")
         file = Path(file_path)

@@ -263,9 +253,7 @@ async def test_beautiful_soup_loader_successfully_loads_file_if_required_args_pr
     await cognee.prune.prune_system(metadata=True)

     try:
-        original_file_path = await save_data_item_to_storage(
-            "https://en.wikipedia.org/wiki/Large_language_model"
-        )
+        original_file_path = await save_data_item_to_storage("http://example.com/")
         file_path = get_data_file_path(original_file_path)
         assert file_path.endswith(".html")
         file = Path(file_path)

@@ -302,9 +290,7 @@ async def test_beautiful_soup_loads_file_successfully():
     }

     try:
-        original_file_path = await save_data_item_to_storage(
-            "https://en.wikipedia.org/wiki/Large_language_model"
-        )
+        original_file_path = await save_data_item_to_storage("http://example.com/")
         file_path = get_data_file_path(original_file_path)
         assert file_path.endswith(".html")
         original_file = Path(file_path)
{cognee-0.3.9.dist-info → cognee-0.4.1.dist-info}/METADATA

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: cognee
-Version: 0.3.9
+Version: 0.4.1
 Summary: Cognee - is a library for enriching LLM context with a semantic layer for better understanding and reasoning.
 Project-URL: Homepage, https://www.cognee.ai
 Project-URL: Repository, https://github.com/topoteretes/cognee
@@ -28,7 +28,8 @@ Requires-Dist: gunicorn<24,>=20.1.0
 Requires-Dist: instructor<2.0.0,>=1.9.1
 Requires-Dist: jinja2<4,>=3.1.3
 Requires-Dist: kuzu==0.11.3
-Requires-Dist:
+Requires-Dist: lance-namespace<=0.0.21
+Requires-Dist: lancedb<=0.25.3,>=0.24.0
 Requires-Dist: limits<5,>=4.4.1
 Requires-Dist: litellm>=1.76.0
 Requires-Dist: mistralai>=1.9.10
@@ -156,27 +157,27 @@ Description-Content-Type: text/markdown

 <br />

-
+Cognee - Accurate and Persistent AI Memory

 <p align="center">
 <a href="https://www.youtube.com/watch?v=1bezuvLwJmw&t=2s">Demo</a>
 .
-<a href="https://cognee.ai">
+<a href="https://docs.cognee.ai/">Docs</a>
+.
+<a href="https://cognee.ai">Learn More</a>
 ·
 <a href="https://discord.gg/NQPKmU5CCg">Join Discord</a>
 ·
 <a href="https://www.reddit.com/r/AIMemory/">Join r/AIMemory</a>
 .
-<a href="https://
-.
-<a href="https://github.com/topoteretes/cognee-community">cognee community repo</a>
+<a href="https://github.com/topoteretes/cognee-community">Community Plugins & Add-ons</a>
 </p>


 [](https://GitHub.com/topoteretes/cognee/network/)
 [](https://GitHub.com/topoteretes/cognee/stargazers/)
 [](https://GitHub.com/topoteretes/cognee/commit/)
-[](https://github.com/topoteretes/cognee/tags/)
 [](https://pepy.tech/project/cognee)
 [](https://github.com/topoteretes/cognee/blob/main/LICENSE)
 [](https://github.com/topoteretes/cognee/graphs/contributors)
@@ -192,11 +193,7 @@ Description-Content-Type: text/markdown
 </a>
 </p>

-
-
-
-
-Build dynamic memory for Agents and replace RAG using scalable, modular ECL (Extract, Cognify, Load) pipelines.
+Use your data to build personalized and dynamic memory for AI Agents. Cognee lets you replace RAG with scalable and modular ECL (Extract, Cognify, Load) pipelines.

 <p align="center">
 🌐 Available Languages
@@ -204,7 +201,7 @@ Build dynamic memory for Agents and replace RAG using scalable, modular ECL (Ext
 <!-- Keep these links. Translations will automatically update with the README. -->
 <a href="https://www.readme-i18n.com/topoteretes/cognee?lang=de">Deutsch</a> |
 <a href="https://www.readme-i18n.com/topoteretes/cognee?lang=es">Español</a> |
-<a href="https://www.readme-i18n.com/topoteretes/cognee?lang=fr">
+<a href="https://www.readme-i18n.com/topoteretes/cognee?lang=fr">Français</a> |
 <a href="https://www.readme-i18n.com/topoteretes/cognee?lang=ja">日本語</a> |
 <a href="https://www.readme-i18n.com/topoteretes/cognee?lang=ko">한국어</a> |
 <a href="https://www.readme-i18n.com/topoteretes/cognee?lang=pt">Português</a> |
@@ -218,69 +215,65 @@ Build dynamic memory for Agents and replace RAG using scalable, modular ECL (Ext
 </div>
 </div>

+## About Cognee

+Cognee is an open-source tool and platform that transforms your raw data into persistent and dynamic AI memory for Agents. It combines vector search with graph databases to make your documents both searchable by meaning and connected by relationships.

-
-
-Get started quickly with a Google Colab <a href="https://colab.research.google.com/drive/12Vi9zID-M3fpKpKiaqDBvkk98ElkRPWy?usp=sharing">notebook</a> , <a href="https://deepnote.com/workspace/cognee-382213d0-0444-4c89-8265-13770e333c02/project/cognee-demo-78ffacb9-5832-4611-bb1a-560386068b30/notebook/Notebook-1-75b24cda566d4c24ab348f7150792601?utm_source=share-modal&utm_medium=product-shared-content&utm_campaign=notebook&utm_content=78ffacb9-5832-4611-bb1a-560386068b30">Deepnote notebook</a> or <a href="https://github.com/topoteretes/cognee/tree/main/cognee-starter-kit">starter repo</a>
-
+You can use Cognee in two ways:

-
+1. [Self-host Cognee Open Source](https://docs.cognee.ai/getting-started/installation), which stores all data locally by default.
+2. [Connect to Cognee Cloud](https://platform.cognee.ai/), and get the same OSS stack on managed infrastructure for easier development and productionization.

-
-Our hosted solution is just our deployment of OSS cognee on Modal, with the goal of making development and productionization easier.
+### Cognee Open Source (self-hosted):

-
+- Interconnects any type of data — including past conversations, files, images, and audio transcriptions
+- Replaces traditional RAG systems with a unified memory layer built on graphs and vectors
+- Reduces developer effort and infrastructure cost while improving quality and precision
+- Provides Pythonic data pipelines for ingestion from 30+ data sources
+- Offers high customizability through user-defined tasks, modular pipelines, and built-in search endpoints

-
--
--
--
--
+### Cognee Cloud (managed):
+- Hosted web UI dashboard
+- Automatic version updates
+- Resource usage analytics
+- GDPR compliant, enterprise-grade security

-
-- Includes a managed UI and a [hosted solution](https://www.cognee.ai)
+## Basic Usage & Feature Guide

+To learn more, [check out this short, end-to-end Colab walkthrough](https://colab.research.google.com/drive/12Vi9zID-M3fpKpKiaqDBvkk98ElkRPWy?usp=sharing) of Cognee's core features.

+[](https://colab.research.google.com/drive/12Vi9zID-M3fpKpKiaqDBvkk98ElkRPWy?usp=sharing)

-##
+## Quickstart

+Let’s try Cognee in just a few lines of code. For detailed setup and configuration, see the [Cognee Docs](https://docs.cognee.ai/getting-started/installation#environment-configuration).

-###
+### Prerequisites

-
+- Python 3.10 to 3.13

-
+### Step 1: Install Cognee

-
+You can install Cognee with **pip**, **poetry**, **uv**, or your preferred Python package manager.

 ```bash
 uv pip install cognee
 ```

-
-
-### 💻 Basic Usage
-
-#### Setup
-
-```
+### Step 2: Configure the LLM
+```python
 import os
 os.environ["LLM_API_KEY"] = "YOUR OPENAI_API_KEY"
-
 ```
+Alternatively, create a `.env` file using our [template](https://github.com/topoteretes/cognee/blob/main/.env.template).

-
-To use different LLM providers, for more info check out our <a href="https://docs.cognee.ai/setup-configuration/llm-providers">documentation</a>
-
-
-#### Simple example
+To integrate other LLM providers, see our [LLM Provider Documentation](https://docs.cognee.ai/setup-configuration/llm-providers).

+### Step 3: Run the Pipeline

+Cognee will take your documents, generate a knowledge graph from them and then query the graph based on combined relationships.

-
-
-This script will run the default pipeline:
+Now, run a minimal pipeline:

 ```python
 import cognee
@@ -298,7 +291,7 @@ async def main():
     await cognee.memify()

     # Query the knowledge graph
-    results = await cognee.search("What does
+    results = await cognee.search("What does Cognee do?")

     # Display the results
     for result in results:
@@ -309,69 +302,61 @@ if __name__ == '__main__':
     asyncio.run(main())

 ```
-Example output:
-```
-Cognee turns documents into AI memory.

+As you can see, the output is generated from the document we previously stored in Cognee:
+
+```bash
+Cognee turns documents into AI memory.
 ```
-##### Via CLI

-
+### Use the Cognee CLI

-
+As an alternative, you can get started with these essential commands:
+
+```bash
 cognee-cli add "Cognee turns documents into AI memory."

 cognee-cli cognify

-cognee-cli search "What does
+cognee-cli search "What does Cognee do?"
 cognee-cli delete --all

 ```
-
--
+
+To open the local UI, run:
+```bash
 cognee-cli -ui
 ```

+## Demos & Examples

-
-
-
-### Hosted Platform
-
-Get up and running in minutes with automatic updates, analytics, and enterprise security.
-
-1. Sign up on [cogwit](https://www.cognee.ai)
-2. Add your API key to local UI and sync your data to Cogwit
-
-
-
-
-## Demos
+See Cognee in action:

-
+### Persistent Agent Memory

-[
+[Cognee Memory for LangGraph Agents](https://github.com/user-attachments/assets/e113b628-7212-4a2b-b288-0be39a93a1c3)

-
+### Simple GraphRAG

-[
+[Watch Demo](https://github.com/user-attachments/assets/f2186b2e-305a-42b0-9c2d-9f4473f15df8)

-
+### Cognee with Ollama

-[
+[Watch Demo](https://github.com/user-attachments/assets/39672858-f774-4136-b957-1e2de67b8981)


-##
-Your contributions are at the core of making this a true open source project. Any contributions you make are **greatly appreciated**. See [`CONTRIBUTING.md`](CONTRIBUTING.md) for more information.
+## Community & Support

+### Contributing
+We welcome contributions from the community! Your input helps make Cognee better for everyone. See [`CONTRIBUTING.md`](CONTRIBUTING.md) to get started.

-
+### Code of Conduct

-We
+We're committed to fostering an inclusive and respectful community. Read our [Code of Conduct](https://github.com/topoteretes/cognee/blob/main/CODE_OF_CONDUCT.md) for guidelines.

-## Citation
+## Research & Citation

-We
+We recently published a research paper on optimizing knowledge graphs for LLM reasoning:

 ```bibtex
 @misc{markovic2025optimizinginterfaceknowledgegraphs,
{cognee-0.3.9.dist-info → cognee-0.4.1.dist-info}/RECORD

@@ -204,7 +204,7 @@ cognee/infrastructure/databases/vector/chromadb/__init__.py,sha256=47DEQpj8HBSa-
 cognee/infrastructure/databases/vector/embeddings/EmbeddingEngine.py,sha256=I-FXxTSRtb0y00U5eJr2o8n4j4DcC3_mEjEya70BPQU,1158
 cognee/infrastructure/databases/vector/embeddings/FastembedEmbeddingEngine.py,sha256=r1NTOo2aMwjbb9-yfHCU_IS-VZ9p3ZdRGRKWZmcIpso,4521
 cognee/infrastructure/databases/vector/embeddings/LiteLLMEmbeddingEngine.py,sha256=_rSMGNPjjfrV7Xr2xZWvs1RPRVF1nj-1nlBk0cGgh9A,8321
-cognee/infrastructure/databases/vector/embeddings/OllamaEmbeddingEngine.py,sha256=
+cognee/infrastructure/databases/vector/embeddings/OllamaEmbeddingEngine.py,sha256=QGU9BpzlTJyWESvjUiFOuAOjkj2LCJTq9FOizPzIi00,5084
 cognee/infrastructure/databases/vector/embeddings/__init__.py,sha256=Akv-ShdXjHw-BE00Gw55GgGxIMr0SZ9FHi3RlpsJmiE,55
 cognee/infrastructure/databases/vector/embeddings/config.py,sha256=w7zaQEBNjnYXQi2N5gTCIooDzwGI3HCyyeWt-Q5WIKw,2539
 cognee/infrastructure/databases/vector/embeddings/embedding_rate_limiter.py,sha256=TyCoo_SipQ6JNy5eqXY2shrZnhb2JVjt9xOsJltOCdw,17598

@@ -569,7 +569,7 @@ cognee/modules/retrieval/chunks_retriever.py,sha256=ntsF2mtCBIAt3c9a_tRd8MVJbxlQ
 cognee/modules/retrieval/code_retriever.py,sha256=-U9sEX-3IAeH34o7tHlcBwDt2EEFlLNbXx9mh6jvPWI,9766
 cognee/modules/retrieval/coding_rules_retriever.py,sha256=3GU259jTbGLqmp_A8sUdE4fyf0td06SKuxBJVW-npIQ,1134
 cognee/modules/retrieval/completion_retriever.py,sha256=armrabXj84Sz_0DLXQR9A1VFU43AFoYdaxITn9tLeuQ,5353
-cognee/modules/retrieval/cypher_search_retriever.py,sha256=
+cognee/modules/retrieval/cypher_search_retriever.py,sha256=bDdJbw2icQeE1h24TtROOGWcCTAoGa7Ng-YPjBVZjZk,2888
 cognee/modules/retrieval/graph_completion_context_extension_retriever.py,sha256=CigoPl2kZqlJzBrWvlozVd9wb-SZERzcSv6B1TUj6b8,6134
 cognee/modules/retrieval/graph_completion_cot_retriever.py,sha256=YKAdpDtrZdGHm_ZMHd8bFkbvgKF0FaDtMWljvVS84bI,11052
 cognee/modules/retrieval/graph_completion_retriever.py,sha256=-pk66LH6IhUfiSmLbCbpMkpQxHFvijd7vRQ4Ax8AEVs,10420

@@ -849,9 +849,9 @@ cognee/tests/integration/documents/PdfDocument_test.py,sha256=IY0Cck8J2gEyuJHPK0
 cognee/tests/integration/documents/TextDocument_test.py,sha256=aSYfyvSQLceZ1c5NqV5Jf5eGA3BL_adP6iwWnT9eMCg,2159
 cognee/tests/integration/documents/UnstructuredDocument_test.py,sha256=nZktosptjw85V1_2iAwlOaYghA4cmqEX62RvQSgU_NY,4006
 cognee/tests/integration/documents/async_gen_zip.py,sha256=h98Q6cxhwb49iaYm4NZ-GmbNDAux-BKplofNgf4aIpc,317
-cognee/tests/integration/web_url_crawler/test_default_url_crawler.py,sha256=
-cognee/tests/integration/web_url_crawler/test_tavily_crawler.py,sha256=
-cognee/tests/integration/web_url_crawler/test_url_adding_e2e.py,sha256=
+cognee/tests/integration/web_url_crawler/test_default_url_crawler.py,sha256=Qk__D7-SwpE5YfCiXoIDF3LgablRMhtoSSGfCVYY-PM,349
+cognee/tests/integration/web_url_crawler/test_tavily_crawler.py,sha256=tro2Isg-zqEEkD03oCWzYV8n5KlqGaUP69RcVVaeYDc,493
+cognee/tests/integration/web_url_crawler/test_url_adding_e2e.py,sha256=F1eTCI9Q9kVwOh5DuDf-uVxz3DqZvJj2HGlAiREIX_8,11127
 cognee/tests/subprocesses/reader.py,sha256=NW5zbXhWUcFXyN9RRAW2lzxCvEYV8hno6gBmE18O0b8,954
 cognee/tests/subprocesses/simple_cognify_1.py,sha256=WE2hG50rFwceKNL07PeAYu-Mrs74pjmdPEQrqZiTf8s,869
 cognee/tests/subprocesses/simple_cognify_2.py,sha256=nv0gJZCLn0iwY7SumiGlIiGJc1tFCyiHhAUzw0sjLn8,872

@@ -942,9 +942,9 @@ distributed/tasks/queued_add_edges.py,sha256=kz1DHE05y-kNHORQJjYWHUi6Q1QWUp_v3Dl
 distributed/tasks/queued_add_nodes.py,sha256=aqK4Ij--ADwUWknxYpiwbYrpa6CcvFfqHWbUZW4Kh3A,452
 distributed/workers/data_point_saving_worker.py,sha256=kmaQy2A2J7W3k9Gd5lyoiT0XYOaJmEM8MbkKVOFOQVU,4729
 distributed/workers/graph_saving_worker.py,sha256=b5OPLLUq0OBALGekdp73JKxU0GrMlVbO4AfIhmACKkQ,4724
-cognee-0.
-cognee-0.
-cognee-0.
-cognee-0.
-cognee-0.
-cognee-0.
+cognee-0.4.1.dist-info/METADATA,sha256=Xv_KcVeblOMxrXCaUpcHGbOnEXhwRHiVtidZw1cJHss,15358
+cognee-0.4.1.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
+cognee-0.4.1.dist-info/entry_points.txt,sha256=fAozOD9Vs4kgYwRhBiZoLCIXu-OSZqVxKGv45l19uok,88
+cognee-0.4.1.dist-info/licenses/LICENSE,sha256=pHHjSQj1DD8SDppW88MMs04TPk7eAanL1c5xj8NY7NQ,11344
+cognee-0.4.1.dist-info/licenses/NOTICE.md,sha256=6L3saP3kSpcingOxDh-SGjMS8GY79Rlh2dBNLaO0o5c,339
+cognee-0.4.1.dist-info/RECORD,,
File without changes: {cognee-0.3.9.dist-info → cognee-0.4.1.dist-info}/entry_points.txt
File without changes: {cognee-0.3.9.dist-info → cognee-0.4.1.dist-info}/licenses/LICENSE
File without changes: {cognee-0.3.9.dist-info → cognee-0.4.1.dist-info}/licenses/NOTICE.md