cognee 0.3.8__py3-none-any.whl → 0.4.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- cognee/infrastructure/files/utils/get_file_metadata.py +3 -3
- cognee/infrastructure/files/utils/guess_file_type.py +19 -5
- cognee/infrastructure/loaders/core/audio_loader.py +1 -0
- cognee/modules/ingestion/data_types/BinaryData.py +1 -1
- cognee/modules/retrieval/cypher_search_retriever.py +3 -1
- {cognee-0.3.8.dist-info → cognee-0.4.0.dist-info}/METADATA +69 -85
- {cognee-0.3.8.dist-info → cognee-0.4.0.dist-info}/RECORD +11 -11
- {cognee-0.3.8.dist-info → cognee-0.4.0.dist-info}/WHEEL +0 -0
- {cognee-0.3.8.dist-info → cognee-0.4.0.dist-info}/entry_points.txt +0 -0
- {cognee-0.3.8.dist-info → cognee-0.4.0.dist-info}/licenses/LICENSE +0 -0
- {cognee-0.3.8.dist-info → cognee-0.4.0.dist-info}/licenses/NOTICE.md +0 -0
cognee/infrastructure/files/utils/get_file_metadata.py

@@ -1,6 +1,6 @@
 import io
 import os.path
-from typing import BinaryIO, TypedDict
+from typing import BinaryIO, TypedDict, Optional
 from pathlib import Path

 from cognee.shared.logging_utils import get_logger

@@ -27,7 +27,7 @@ class FileMetadata(TypedDict):
     file_size: int


-async def get_file_metadata(file: BinaryIO) -> FileMetadata:
+async def get_file_metadata(file: BinaryIO, name: Optional[str] = None) -> FileMetadata:
     """
     Retrieve metadata from a file object.


@@ -53,7 +53,7 @@ async def get_file_metadata(file: BinaryIO) -> FileMetadata:
     except io.UnsupportedOperation as error:
         logger.error(f"Error retrieving content hash for file: {file.name} \n{str(error)}\n\n")

-    file_type = guess_file_type(file)
+    file_type = guess_file_type(file, name)

    file_path = getattr(file, "name", None) or getattr(file, "full_name", None)

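The only behavioral change in this file is the new optional `name` argument, which is forwarded to `guess_file_type`. Below is a minimal usage sketch, not taken from the package: the import path is assumed from the file location shown above, and `notes.txt` is a made-up placeholder file.

```python
import asyncio
from pathlib import Path

# Assumed import path, based on the file location shown in this diff.
from cognee.infrastructure.files.utils.get_file_metadata import get_file_metadata


async def main() -> None:
    # Placeholder file created only for the example.
    path = Path("notes.txt")
    path.write_text("Cognee turns documents into AI memory.")

    with path.open("rb") as stream:
        # The new keyword argument passes the filename hint through to
        # guess_file_type, so plain-text detection no longer depends on
        # magic numbers alone.
        metadata = await get_file_metadata(stream, name=path.name)

    # "file_size" and "name" are keys shown in the FileMetadata TypedDict above.
    print(metadata["file_size"], metadata["name"])


asyncio.run(main())
```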
cognee/infrastructure/files/utils/guess_file_type.py

@@ -1,6 +1,9 @@
-
+import io
+from pathlib import Path
+from typing import BinaryIO, Optional, Any
 import filetype
-from
+from tempfile import SpooledTemporaryFile
+from filetype.types.base import Type


 class FileTypeException(Exception):

@@ -22,7 +25,7 @@ class FileTypeException(Exception):
         self.message = message


-def guess_file_type(file: BinaryIO) -> filetype.Type:
+def guess_file_type(file: BinaryIO, name: Optional[str] = None) -> filetype.Type:
     """
     Guess the file type from the given binary file stream.


@@ -39,12 +42,23 @@ def guess_file_type(file: BinaryIO) -> filetype.Type:

         - filetype.Type: The guessed file type, represented as filetype.Type.
     """
+
+    # Note: If file has .txt or .text extension, consider it a plain text file as filetype.guess may not detect it properly
+    # as it contains no magic number encoding
+    ext = None
+    if isinstance(file, str):
+        ext = Path(file).suffix
+    elif name is not None:
+        ext = Path(name).suffix
+
+    if ext in [".txt", ".text"]:
+        file_type = Type("text/plain", "txt")
+        return file_type
+
     file_type = filetype.guess(file)

     # If file type could not be determined consider it a plain text file as they don't have magic number encoding
     if file_type is None:
-        from filetype.types.base import Type
-
         file_type = Type("text/plain", "txt")

     if file_type is None:

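Plain text carries no magic number, so `filetype.guess` alone cannot identify `.txt` content; the new code short-circuits on the extension of either a string path or the new `name` hint. A small illustrative sketch follows, assuming the module path shown above; the buffer contents and the `notes.txt` name are invented.

```python
import io

# Assumed import path, based on the file location shown in this diff.
from cognee.infrastructure.files.utils.guess_file_type import guess_file_type

stream = io.BytesIO(b"Cognee turns documents into AI memory.")

# With a .txt hint the function should return text/plain before inspecting the
# stream, which is exactly the case filetype.guess() cannot resolve on its own.
file_type = guess_file_type(stream, name="notes.txt")
print(file_type.mime, file_type.extension)  # expected: text/plain txt
```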
cognee/modules/ingestion/data_types/BinaryData.py

@@ -30,7 +30,7 @@ class BinaryData(IngestionData):

     async def ensure_metadata(self):
         if self.metadata is None:
-            self.metadata = await get_file_metadata(self.data)
+            self.metadata = await get_file_metadata(self.data, name=self.name)

             if self.metadata["name"] is None:
                 self.metadata["name"] = self.name

cognee/modules/retrieval/cypher_search_retriever.py

@@ -1,4 +1,6 @@
 from typing import Any, Optional
+from fastapi.encoders import jsonable_encoder
+
 from cognee.infrastructure.databases.graph import get_graph_engine
 from cognee.modules.retrieval.base_retriever import BaseRetriever
 from cognee.modules.retrieval.utils.completion import generate_completion

@@ -50,7 +52,7 @@ class CypherSearchRetriever(BaseRetriever):
                 logger.warning("Search attempt on an empty knowledge graph")
                 return []

-            result = await graph_engine.query(query)
+            result = jsonable_encoder(await graph_engine.query(query))
        except Exception as e:
            logger.error("Failed to execture cypher search retrieval: %s", str(e))
            raise CypherSearchError() from e

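The retriever now wraps raw graph query results in FastAPI's `jsonable_encoder` before returning them. The standalone sketch below shows the effect with an invented sample record: graph engines can hand back values such as `UUID` or `datetime` objects that `json.dumps` rejects, and the encoder converts the whole structure to JSON-safe primitives first.

```python
import json
from datetime import datetime, timezone
from uuid import uuid4

from fastapi.encoders import jsonable_encoder

# Invented stand-in for a row that graph_engine.query(...) might return.
raw_result = [{"id": uuid4(), "created_at": datetime.now(timezone.utc), "name": "Cognee"}]

# json.dumps(raw_result) would raise TypeError; the encoded copy serializes cleanly.
print(json.dumps(jsonable_encoder(raw_result)))
```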
{cognee-0.3.8.dist-info → cognee-0.4.0.dist-info}/METADATA

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: cognee
-Version: 0.
+Version: 0.4.0
 Summary: Cognee - is a library for enriching LLM context with a semantic layer for better understanding and reasoning.
 Project-URL: Homepage, https://www.cognee.ai
 Project-URL: Repository, https://github.com/topoteretes/cognee

@@ -156,27 +156,27 @@ Description-Content-Type: text/markdown

 <br />

-
+Cognee - Accurate and Persistent AI Memory

 <p align="center">
 <a href="https://www.youtube.com/watch?v=1bezuvLwJmw&t=2s">Demo</a>
 .
-<a href="https://cognee.ai">
+<a href="https://docs.cognee.ai/">Docs</a>
+.
+<a href="https://cognee.ai">Learn More</a>
 ·
 <a href="https://discord.gg/NQPKmU5CCg">Join Discord</a>
 ·
 <a href="https://www.reddit.com/r/AIMemory/">Join r/AIMemory</a>
 .
-<a href="https://
-.
-<a href="https://github.com/topoteretes/cognee-community">cognee community repo</a>
+<a href="https://github.com/topoteretes/cognee-community">Community Plugins & Add-ons</a>
 </p>


 [](https://GitHub.com/topoteretes/cognee/network/)
 [](https://GitHub.com/topoteretes/cognee/stargazers/)
 [](https://GitHub.com/topoteretes/cognee/commit/)
-[](https://github.com/topoteretes/cognee/tags/)
 [](https://pepy.tech/project/cognee)
 [](https://github.com/topoteretes/cognee/blob/main/LICENSE)
 [](https://github.com/topoteretes/cognee/graphs/contributors)

@@ -192,11 +192,7 @@ Description-Content-Type: text/markdown
 </a>
 </p>

-
-
-
-
-Build dynamic memory for Agents and replace RAG using scalable, modular ECL (Extract, Cognify, Load) pipelines.
+Use your data to build personalized and dynamic memory for AI Agents. Cognee lets you replace RAG with scalable and modular ECL (Extract, Cognify, Load) pipelines.

 <p align="center">
 🌐 Available Languages

@@ -204,7 +200,7 @@ Build dynamic memory for Agents and replace RAG using scalable, modular ECL (Ext
 <!-- Keep these links. Translations will automatically update with the README. -->
 <a href="https://www.readme-i18n.com/topoteretes/cognee?lang=de">Deutsch</a> |
 <a href="https://www.readme-i18n.com/topoteretes/cognee?lang=es">Español</a> |
-<a href="https://www.readme-i18n.com/topoteretes/cognee?lang=fr">
+<a href="https://www.readme-i18n.com/topoteretes/cognee?lang=fr">Français</a> |
 <a href="https://www.readme-i18n.com/topoteretes/cognee?lang=ja">日本語</a> |
 <a href="https://www.readme-i18n.com/topoteretes/cognee?lang=ko">한국어</a> |
 <a href="https://www.readme-i18n.com/topoteretes/cognee?lang=pt">Português</a> |

@@ -218,69 +214,65 @@ Build dynamic memory for Agents and replace RAG using scalable, modular ECL (Ext
 </div>
 </div>

+## About Cognee

+Cognee is an open-source tool and platform that transforms your raw data into persistent and dynamic AI memory for Agents. It combines vector search with graph databases to make your documents both searchable by meaning and connected by relationships.

-
-
-Get started quickly with a Google Colab <a href="https://colab.research.google.com/drive/12Vi9zID-M3fpKpKiaqDBvkk98ElkRPWy?usp=sharing">notebook</a> , <a href="https://deepnote.com/workspace/cognee-382213d0-0444-4c89-8265-13770e333c02/project/cognee-demo-78ffacb9-5832-4611-bb1a-560386068b30/notebook/Notebook-1-75b24cda566d4c24ab348f7150792601?utm_source=share-modal&utm_medium=product-shared-content&utm_campaign=notebook&utm_content=78ffacb9-5832-4611-bb1a-560386068b30">Deepnote notebook</a> or <a href="https://github.com/topoteretes/cognee/tree/main/cognee-starter-kit">starter repo</a>
-
+You can use Cognee in two ways:

-
+1. [Self-host Cognee Open Source](https://docs.cognee.ai/getting-started/installation), which stores all data locally by default.
+2. [Connect to Cognee Cloud](https://platform.cognee.ai/), and get the same OSS stack on managed infrastructure for easier development and productionization.

-
-Our hosted solution is just our deployment of OSS cognee on Modal, with the goal of making development and productionization easier.
+### Cognee Open Source (self-hosted):

-
+- Interconnects any type of data — including past conversations, files, images, and audio transcriptions
+- Replaces traditional RAG systems with a unified memory layer built on graphs and vectors
+- Reduces developer effort and infrastructure cost while improving quality and precision
+- Provides Pythonic data pipelines for ingestion from 30+ data sources
+- Offers high customizability through user-defined tasks, modular pipelines, and built-in search endpoints

-
--
--
--
--
+### Cognee Cloud (managed):
+- Hosted web UI dashboard
+- Automatic version updates
+- Resource usage analytics
+- GDPR compliant, enterprise-grade security

-
-- Includes a managed UI and a [hosted solution](https://www.cognee.ai)
+## Basic Usage & Feature Guide

+To learn more, [check out this short, end-to-end Colab walkthrough](https://colab.research.google.com/drive/12Vi9zID-M3fpKpKiaqDBvkk98ElkRPWy?usp=sharing) of Cognee's core features.

+[](https://colab.research.google.com/drive/12Vi9zID-M3fpKpKiaqDBvkk98ElkRPWy?usp=sharing)

-##
+## Quickstart

+Let’s try Cognee in just a few lines of code. For detailed setup and configuration, see the [Cognee Docs](https://docs.cognee.ai/getting-started/installation#environment-configuration).

-###
+### Prerequisites

-
+- Python 3.10 to 3.12

-
+### Step 1: Install Cognee

-
+You can install Cognee with **pip**, **poetry**, **uv**, or your preferred Python package manager.

 ```bash
 uv pip install cognee
 ```

-
-
-### 💻 Basic Usage
-
-#### Setup
-
-```
+### Step 2: Configure the LLM
+```python
 import os
 os.environ["LLM_API_KEY"] = "YOUR OPENAI_API_KEY"
-
 ```
+Alternatively, create a `.env` file using our [template](https://github.com/topoteretes/cognee/blob/main/.env.template).

-
-To use different LLM providers, for more info check out our <a href="https://docs.cognee.ai/setup-configuration/llm-providers">documentation</a>
-
-
-#### Simple example
+To integrate other LLM providers, see our [LLM Provider Documentation](https://docs.cognee.ai/setup-configuration/llm-providers).

+### Step 3: Run the Pipeline

+Cognee will take your documents, generate a knowledge graph from them and then query the graph based on combined relationships.

-
-
-This script will run the default pipeline:
+Now, run a minimal pipeline:

 ```python
 import cognee

@@ -298,7 +290,7 @@ async def main():
     await cognee.memify()

     # Query the knowledge graph
-    results = await cognee.search("What does
+    results = await cognee.search("What does Cognee do?")

     # Display the results
     for result in results:

@@ -309,69 +301,61 @@ if __name__ == '__main__':
     asyncio.run(main())

 ```
-Example output:
-```
-Cognee turns documents into AI memory.

+As you can see, the output is generated from the document we previously stored in Cognee:
+
+```bash
+Cognee turns documents into AI memory.
 ```
-##### Via CLI

-
+### Use the Cognee CLI

-
+As an alternative, you can get started with these essential commands:
+
+```bash
 cognee-cli add "Cognee turns documents into AI memory."

 cognee-cli cognify

-cognee-cli search "What does
+cognee-cli search "What does Cognee do?"
 cognee-cli delete --all

 ```
-
-
+
+To open the local UI, run:
+```bash
 cognee-cli -ui
 ```

+## Demos & Examples

-
-
-
-### Hosted Platform
-
-Get up and running in minutes with automatic updates, analytics, and enterprise security.
-
-1. Sign up on [cogwit](https://www.cognee.ai)
-2. Add your API key to local UI and sync your data to Cogwit
-
-
-
-
-## Demos
+See Cognee in action:

-
+### Persistent Agent Memory

-[
+[Cognee Memory for LangGraph Agents](https://github.com/user-attachments/assets/e113b628-7212-4a2b-b288-0be39a93a1c3)

-
+### Simple GraphRAG

-[
+[Watch Demo](https://github.com/user-attachments/assets/f2186b2e-305a-42b0-9c2d-9f4473f15df8)

-
+### Cognee with Ollama

-[
+[Watch Demo](https://github.com/user-attachments/assets/39672858-f774-4136-b957-1e2de67b8981)


-##
-Your contributions are at the core of making this a true open source project. Any contributions you make are **greatly appreciated**. See [`CONTRIBUTING.md`](CONTRIBUTING.md) for more information.
+## Community & Support

+### Contributing
+We welcome contributions from the community! Your input helps make Cognee better for everyone. See [`CONTRIBUTING.md`](CONTRIBUTING.md) to get started.

-
+### Code of Conduct

-We
+We're committed to fostering an inclusive and respectful community. Read our [Code of Conduct](https://github.com/topoteretes/cognee/blob/main/CODE_OF_CONDUCT.md) for guidelines.

-## Citation
+## Research & Citation

-We
+We recently published a research paper on optimizing knowledge graphs for LLM reasoning:

 ```bibtex
 @misc{markovic2025optimizinginterfaceknowledgegraphs,

{cognee-0.3.8.dist-info → cognee-0.4.0.dist-info}/RECORD

@@ -246,8 +246,8 @@ cognee/infrastructure/files/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeR
 cognee/infrastructure/files/utils/extract_text_from_file.py,sha256=-v0uvK6nXP6Q2Ia0GjIi97WntPFX6sWZQXO_Fg9TrCc,1112
 cognee/infrastructure/files/utils/get_data_file_path.py,sha256=Xz9anl6yYxK6wETKhVeK4f3ahjw58Aj8YkyJkJONOvc,1549
 cognee/infrastructure/files/utils/get_file_content_hash.py,sha256=0L_wgsRF8zqmtisFWcp4agDs7WovvBjiVWNQ_NCPKwo,1338
-cognee/infrastructure/files/utils/get_file_metadata.py,sha256=
-cognee/infrastructure/files/utils/guess_file_type.py,sha256=
+cognee/infrastructure/files/utils/get_file_metadata.py,sha256=3U0usuzEuGbVY0PBqQl9FyU1fWeHzlc8DRqNWZaSoc8,2336
+cognee/infrastructure/files/utils/guess_file_type.py,sha256=s1CkS4DhhxyYIxlapGfVZKOQUbDwselljLRvlrP4BvE,1923
 cognee/infrastructure/files/utils/is_text_content.py,sha256=iNZWCECNLMjlQfOQAujVQis7prA1cqsscRRSQsxccZo,1316
 cognee/infrastructure/files/utils/open_data_file.py,sha256=3TPsTUDCH6SOuvbwNembE-YRiFDhb9yCqOC537b6iGY,2155
 cognee/infrastructure/llm/LLMGateway.py,sha256=o_XXoj6qbTb2zO9MCxut81CkZODJUMtRbLAS854JSzY,2478

@@ -367,7 +367,7 @@ cognee/infrastructure/loaders/get_loader_engine.py,sha256=cPJefAHFAWU1HXQoWqCpwh
 cognee/infrastructure/loaders/supported_loaders.py,sha256=LBCvqk6PGJsTtFB5vUpArPmxSegRr81y1oNAejzn1mE,961
 cognee/infrastructure/loaders/use_loader.py,sha256=ncfUFVohPox296m8tMeIl6Hnk1xRvHcpRCmwZXKPZ1s,598
 cognee/infrastructure/loaders/core/__init__.py,sha256=LTr8FWDXpG-Oxp8nwwn0KnHT97aIK6_FWiswmy7g40Q,230
-cognee/infrastructure/loaders/core/audio_loader.py,sha256=
+cognee/infrastructure/loaders/core/audio_loader.py,sha256=VBiJb3tgog51yP14CMLQP3RdF6cmj5dCsHeAHDMlCz8,3042
 cognee/infrastructure/loaders/core/image_loader.py,sha256=b8etveiidIvCw7PXqM2ldyxXDhkqi4-Ak-4BbX664Is,3390
 cognee/infrastructure/loaders/core/text_loader.py,sha256=zkFhjm_QeQu4fWv_Wkoe0O1Kpe9_uBgskkjeWn0sV-M,2991
 cognee/infrastructure/loaders/external/__init__.py,sha256=UwLJK81I1Atuw3FN34EDy8NKe7sltxRLZiONYHfoW4o,884

@@ -483,7 +483,7 @@ cognee/modules/ingestion/discover_directory_datasets.py,sha256=wtqYoZ5MpGc_FuzyK
 cognee/modules/ingestion/get_matched_datasets.py,sha256=BL2H_3t3wDWqcJxlo6uv-1u__g2E5OMwJYFsLCSDF34,475
 cognee/modules/ingestion/identify.py,sha256=4-oD_VjdJC9oUmJjuLJ1a6BX1-GKbw-rNgWyB9GyhC8,346
 cognee/modules/ingestion/save_data_to_file.py,sha256=SZFrWbkRCvENQ05JXAAKZgcVm4-s795ZPnhCgdGM5HY,1230
-cognee/modules/ingestion/data_types/BinaryData.py,sha256=
+cognee/modules/ingestion/data_types/BinaryData.py,sha256=UUo3MZdGaIePs0jbI_Nwwwr6HBnsdtqwt0FJU1d6rqw,1076
 cognee/modules/ingestion/data_types/IngestionData.py,sha256=JLKzItByitgfQAeEo7-qaRRce_weij-t3YY_nJ4wFy0,309
 cognee/modules/ingestion/data_types/S3BinaryData.py,sha256=Kdd4R2anhhIPQZ-5xihcWrMPY_MPHIfS4GJYP4ZeraU,1805
 cognee/modules/ingestion/data_types/TextData.py,sha256=HpIgFqFHm66D-_bgEljUSsh4GSfsLaOj_ubFc_RalNQ,939

@@ -569,7 +569,7 @@ cognee/modules/retrieval/chunks_retriever.py,sha256=ntsF2mtCBIAt3c9a_tRd8MVJbxlQ
 cognee/modules/retrieval/code_retriever.py,sha256=-U9sEX-3IAeH34o7tHlcBwDt2EEFlLNbXx9mh6jvPWI,9766
 cognee/modules/retrieval/coding_rules_retriever.py,sha256=3GU259jTbGLqmp_A8sUdE4fyf0td06SKuxBJVW-npIQ,1134
 cognee/modules/retrieval/completion_retriever.py,sha256=armrabXj84Sz_0DLXQR9A1VFU43AFoYdaxITn9tLeuQ,5353
-cognee/modules/retrieval/cypher_search_retriever.py,sha256=
+cognee/modules/retrieval/cypher_search_retriever.py,sha256=bDdJbw2icQeE1h24TtROOGWcCTAoGa7Ng-YPjBVZjZk,2888
 cognee/modules/retrieval/graph_completion_context_extension_retriever.py,sha256=CigoPl2kZqlJzBrWvlozVd9wb-SZERzcSv6B1TUj6b8,6134
 cognee/modules/retrieval/graph_completion_cot_retriever.py,sha256=YKAdpDtrZdGHm_ZMHd8bFkbvgKF0FaDtMWljvVS84bI,11052
 cognee/modules/retrieval/graph_completion_retriever.py,sha256=-pk66LH6IhUfiSmLbCbpMkpQxHFvijd7vRQ4Ax8AEVs,10420

@@ -942,9 +942,9 @@ distributed/tasks/queued_add_edges.py,sha256=kz1DHE05y-kNHORQJjYWHUi6Q1QWUp_v3Dl
 distributed/tasks/queued_add_nodes.py,sha256=aqK4Ij--ADwUWknxYpiwbYrpa6CcvFfqHWbUZW4Kh3A,452
 distributed/workers/data_point_saving_worker.py,sha256=kmaQy2A2J7W3k9Gd5lyoiT0XYOaJmEM8MbkKVOFOQVU,4729
 distributed/workers/graph_saving_worker.py,sha256=b5OPLLUq0OBALGekdp73JKxU0GrMlVbO4AfIhmACKkQ,4724
-cognee-0.
-cognee-0.
-cognee-0.
-cognee-0.
-cognee-0.
-cognee-0.
+cognee-0.4.0.dist-info/METADATA,sha256=0gKutXhioeXNoqHwCwj-RYDSf4ZUYXhu9xq4VrknNL0,15317
+cognee-0.4.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+cognee-0.4.0.dist-info/entry_points.txt,sha256=fAozOD9Vs4kgYwRhBiZoLCIXu-OSZqVxKGv45l19uok,88
+cognee-0.4.0.dist-info/licenses/LICENSE,sha256=pHHjSQj1DD8SDppW88MMs04TPk7eAanL1c5xj8NY7NQ,11344
+cognee-0.4.0.dist-info/licenses/NOTICE.md,sha256=6L3saP3kSpcingOxDh-SGjMS8GY79Rlh2dBNLaO0o5c,339
+cognee-0.4.0.dist-info/RECORD,,

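The RECORD hashes above follow the standard wheel format: each entry is `path,sha256=<digest>,<size>`, where the digest is the urlsafe base64 encoding of the file's SHA-256 hash with trailing `=` padding stripped. A small sketch for reproducing an entry from an unpacked wheel; the path in the usage comment is a placeholder.

```python
import base64
import hashlib
from pathlib import Path


def record_entry(path: str) -> str:
    """Build a wheel RECORD line for the file at `path`."""
    data = Path(path).read_bytes()
    # urlsafe base64 of the SHA-256 digest, without trailing "=" padding.
    digest = base64.urlsafe_b64encode(hashlib.sha256(data).digest()).rstrip(b"=").decode()
    return f"{path},sha256={digest},{len(data)}"


# Placeholder usage against a file from the unpacked 0.4.0 wheel:
# print(record_entry("cognee/modules/ingestion/data_types/BinaryData.py"))
```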
{cognee-0.3.8.dist-info → cognee-0.4.0.dist-info}/WHEEL: file without changes
{cognee-0.3.8.dist-info → cognee-0.4.0.dist-info}/entry_points.txt: file without changes
{cognee-0.3.8.dist-info → cognee-0.4.0.dist-info}/licenses/LICENSE: file without changes
{cognee-0.3.8.dist-info → cognee-0.4.0.dist-info}/licenses/NOTICE.md: file without changes