cognee-0.3.2-py3-none-any.whl → cognee-0.3.4.dev0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- cognee/api/v1/notebooks/routers/get_notebooks_router.py +2 -1
- cognee/api/v1/search/search.py +1 -1
- cognee/infrastructure/databases/relational/sqlalchemy/SqlAlchemyAdapter.py +2 -2
- cognee/infrastructure/databases/vector/chromadb/ChromaDBAdapter.py +2 -6
- cognee/infrastructure/databases/vector/config.py +1 -1
- cognee/infrastructure/databases/vector/embeddings/OllamaEmbeddingEngine.py +2 -4
- cognee/infrastructure/llm/structured_output_framework/litellm_instructor/llm/anthropic/adapter.py +4 -2
- cognee/infrastructure/utils/run_async.py +9 -4
- cognee/infrastructure/utils/run_sync.py +4 -3
- cognee/modules/notebooks/methods/create_tutorial_notebook.py +87 -0
- cognee/modules/notebooks/methods/get_notebook.py +2 -2
- cognee/modules/notebooks/methods/update_notebook.py +0 -1
- cognee/modules/notebooks/operations/run_in_local_sandbox.py +8 -5
- cognee/modules/retrieval/graph_completion_context_extension_retriever.py +1 -1
- cognee/modules/retrieval/graph_completion_cot_retriever.py +1 -1
- cognee/modules/retrieval/graph_completion_retriever.py +1 -1
- cognee/modules/retrieval/temporal_retriever.py +1 -1
- cognee/modules/retrieval/user_qa_feedback.py +1 -1
- cognee/modules/search/methods/search.py +12 -13
- cognee/modules/search/utils/prepare_search_result.py +31 -9
- cognee/modules/search/utils/transform_context_to_graph.py +1 -1
- cognee/modules/search/utils/transform_insights_to_graph.py +28 -0
- cognee/tasks/temporal_graph/models.py +11 -6
- cognee/tests/cli_tests/cli_unit_tests/test_cli_main.py +5 -5
- cognee/tests/test_temporal_graph.py +6 -34
- {cognee-0.3.2.dist-info → cognee-0.3.4.dev0.dist-info}/METADATA +5 -5
- {cognee-0.3.2.dist-info → cognee-0.3.4.dev0.dist-info}/RECORD +31 -31
- cognee-0.3.4.dev0.dist-info/entry_points.txt +2 -0
- cognee/api/v1/save/save.py +0 -335
- cognee/tests/test_save_export_path.py +0 -116
- cognee-0.3.2.dist-info/entry_points.txt +0 -2
- {cognee-0.3.2.dist-info → cognee-0.3.4.dev0.dist-info}/WHEEL +0 -0
- {cognee-0.3.2.dist-info → cognee-0.3.4.dev0.dist-info}/licenses/LICENSE +0 -0
- {cognee-0.3.2.dist-info → cognee-0.3.4.dev0.dist-info}/licenses/NOTICE.md +0 -0
cognee/tests/test_save_export_path.py

@@ -1,116 +0,0 @@
-import os
-import asyncio
-from uuid import uuid4
-
-import pytest
-
-
-@pytest.mark.asyncio
-async def test_save_uses_custom_export_path(tmp_path, monkeypatch):
-    # Import target after tmp fixtures are ready
-    from cognee.api.v1.save import save as save_mod
-
-    # Prepare two mock datasets
-    class Dataset:
-        def __init__(self, id_, name):
-            self.id = id_
-            self.name = name
-
-    ds1 = Dataset(uuid4(), "dataset_alpha")
-    ds2 = Dataset(uuid4(), "dataset_beta")
-
-    # Mock dataset discovery
-    async def mock_get_authorized_existing_datasets(datasets, permission_type, user):
-        return [ds1, ds2]
-
-    monkeypatch.setattr(
-        save_mod, "get_authorized_existing_datasets", mock_get_authorized_existing_datasets
-    )
-
-    # Mock data items (with filename collision in ds1)
-    class DataItem:
-        def __init__(self, id_, name, original_path=None):
-            self.id = id_
-            self.name = name
-            self.original_data_location = original_path
-
-    ds1_items = [
-        DataItem(uuid4(), "report.txt", "/root/a/report.txt"),
-        DataItem(uuid4(), "report.txt", "/root/b/report.txt"),  # collision
-    ]
-    ds2_items = [
-        DataItem(uuid4(), "notes.md", "/root/x/notes.md"),
-    ]
-
-    async def mock_get_dataset_data(dataset_id):
-        if dataset_id == ds1.id:
-            return ds1_items
-        if dataset_id == ds2.id:
-            return ds2_items
-        return []
-
-    monkeypatch.setattr(save_mod, "get_dataset_data", mock_get_dataset_data)
-
-    # Mock summary retrieval
-    async def mock_get_document_summaries_text(data_id: str) -> str:
-        return "This is a summary."
-
-    monkeypatch.setattr(save_mod, "_get_document_summaries_text", mock_get_document_summaries_text)
-
-    # Mock questions
-    async def mock_generate_questions(file_name: str, summary_text: str):
-        return ["Q1?", "Q2?", "Q3?"]
-
-    monkeypatch.setattr(save_mod, "_generate_questions", mock_generate_questions)
-
-    # Mock searches per question
-    async def mock_run_searches_for_question(question, dataset_id, search_types, top_k):
-        return {st.value: [f"{question} -> ok"] for st in search_types}
-
-    monkeypatch.setattr(save_mod, "_run_searches_for_question", mock_run_searches_for_question)
-
-    # Use custom export path
-    export_dir = tmp_path / "my_exports"
-    export_dir_str = str(export_dir)
-
-    # Run
-    result = await save_mod.save(
-        datasets=None,
-        export_root_directory=export_dir_str,
-        max_questions=3,
-        search_types=["GRAPH_COMPLETION", "INSIGHTS", "CHUNKS"],
-        top_k=2,
-        include_summary=True,
-        include_ascii_tree=True,
-        concurrency=2,
-        timeout=None,
-    )
-
-    # Verify returned mapping points to our custom path
-    assert str(ds1.id) in result and str(ds2.id) in result
-    assert result[str(ds1.id)].startswith(export_dir_str)
-    assert result[str(ds2.id)].startswith(export_dir_str)
-
-    # Verify directories and files exist
-    ds1_dir = result[str(ds1.id)]
-    ds2_dir = result[str(ds2.id)]
-
-    assert os.path.isdir(ds1_dir)
-    assert os.path.isdir(ds2_dir)
-
-    # index.md present
-    assert os.path.isfile(os.path.join(ds1_dir, "index.md"))
-    assert os.path.isfile(os.path.join(ds2_dir, "index.md"))
-
-    # File markdowns exist; collision handling: two files with similar base
-    ds1_files = [f for f in os.listdir(ds1_dir) if f.endswith(".md") and f != "index.md"]
-    assert len(ds1_files) == 2
-    assert any(f == "report.txt.md" for f in ds1_files)
-    assert any(f.startswith("report.txt__") and f.endswith(".md") for f in ds1_files)
-
-    # Content sanity: ensure question headers exist in one file
-    sample_md_path = os.path.join(ds1_dir, ds1_files[0])
-    with open(sample_md_path, "r", encoding="utf-8") as fh:
-        content = fh.read()
-    assert "## Question ideas" in content
-    assert "## Searches" in content