cognee 0.3.3__py3-none-any.whl → 0.3.4.dev0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- cognee/api/v1/search/search.py +1 -1
- cognee/infrastructure/databases/vector/chromadb/ChromaDBAdapter.py +2 -6
- cognee/infrastructure/databases/vector/embeddings/OllamaEmbeddingEngine.py +2 -4
- cognee/infrastructure/llm/structured_output_framework/litellm_instructor/llm/anthropic/adapter.py +4 -2
- cognee/modules/notebooks/methods/create_tutorial_notebook.py +9 -14
- cognee/modules/search/methods/search.py +12 -13
- cognee/modules/search/utils/prepare_search_result.py +28 -6
- cognee/modules/search/utils/transform_context_to_graph.py +1 -1
- cognee/modules/search/utils/transform_insights_to_graph.py +28 -0
- cognee/tasks/temporal_graph/models.py +11 -6
- cognee/tests/cli_tests/cli_unit_tests/test_cli_main.py +5 -5
- cognee/tests/test_temporal_graph.py +6 -34
- {cognee-0.3.3.dist-info → cognee-0.3.4.dev0.dist-info}/METADATA +5 -5
- {cognee-0.3.3.dist-info → cognee-0.3.4.dev0.dist-info}/RECORD +18 -17
- {cognee-0.3.3.dist-info → cognee-0.3.4.dev0.dist-info}/WHEEL +0 -0
- {cognee-0.3.3.dist-info → cognee-0.3.4.dev0.dist-info}/entry_points.txt +0 -0
- {cognee-0.3.3.dist-info → cognee-0.3.4.dev0.dist-info}/licenses/LICENSE +0 -0
- {cognee-0.3.3.dist-info → cognee-0.3.4.dev0.dist-info}/licenses/NOTICE.md +0 -0
cognee/api/v1/search/search.py
CHANGED
@@ -22,7 +22,7 @@ async def search(
     node_type: Optional[Type] = NodeSet,
     node_name: Optional[List[str]] = None,
     save_interaction: bool = False,
-    last_k: Optional[int] =
+    last_k: Optional[int] = 1,
     only_context: bool = False,
     use_combined_context: bool = False,
 ) -> Union[List[SearchResult], CombinedSearchResult]:
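
The change above gives `last_k` an explicit default of 1 in the public search signature. A minimal usage sketch (the query string is illustrative; all other arguments keep their defaults):

    import cognee

    # last_k now defaults to 1, so passing it is optional; shown explicitly here.
    results = await cognee.search("Show me commits", last_k=1)
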
cognee/infrastructure/databases/vector/chromadb/ChromaDBAdapter.py
CHANGED

@@ -83,7 +83,7 @@ def process_data_for_chroma(data):
         elif isinstance(value, list):
             # Store lists as JSON strings with special prefix
             processed_data[f"{key}__list"] = json.dumps(value)
-        elif isinstance(value, (str, int, float, bool))
+        elif isinstance(value, (str, int, float, bool)):
             processed_data[key] = value
         else:
             processed_data[key] = str(value)

@@ -553,8 +553,4 @@ class ChromaDBAdapter(VectorDBInterface):
         Returns a list of collection names.
         """
         client = await self.get_connection()
-
-        return [
-            collection.name if hasattr(collection, "name") else collection["name"]
-            for collection in collections
-        ]
+        return await client.list_collections()
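
The second hunk simplifies collection listing: the adapter now returns the client's result directly, presumably because with chromadb>=0.6 (the floor pinned in the updated METADATA below) `list_collections()` already yields collection names rather than `Collection` objects that need unwrapping. A rough sketch of the resulting behaviour, assuming an async ChromaDB client like the one the adapter's `get_connection()` returns:

    # Sketch only, not the adapter's code; `client` is assumed to be an async ChromaDB client.
    names = await client.list_collections()  # e.g. ["documents", "chunks"] with chromadb>=0.6
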
cognee/infrastructure/databases/vector/embeddings/OllamaEmbeddingEngine.py
CHANGED

@@ -94,10 +94,8 @@ class OllamaEmbeddingEngine(EmbeddingEngine):
         """
         Internal method to call the Ollama embeddings endpoint for a single prompt.
         """
-        payload = {
-
-            "prompt": prompt,
-        }
+        payload = {"model": self.model, "prompt": prompt, "input": prompt}
+
         headers = {}
         api_key = os.getenv("LLM_API_KEY")
         if api_key:
cognee/infrastructure/llm/structured_output_framework/litellm_instructor/llm/anthropic/adapter.py
CHANGED
@@ -12,6 +12,7 @@ from cognee.infrastructure.llm.structured_output_framework.litellm_instructor.ll
 )
 
 from cognee.infrastructure.llm.LLMGateway import LLMGateway
+from cognee.infrastructure.llm.config import get_llm_config
 
 
 class AnthropicAdapter(LLMInterface):

@@ -27,7 +28,8 @@ class AnthropicAdapter(LLMInterface):
         import anthropic
 
         self.aclient = instructor.patch(
-            create=anthropic.AsyncAnthropic().messages.create,
+            create=anthropic.AsyncAnthropic(api_key=get_llm_config().llm_api_key).messages.create,
+            mode=instructor.Mode.ANTHROPIC_TOOLS,
         )
 
         self.model = model

@@ -57,7 +59,7 @@ class AnthropicAdapter(LLMInterface):
 
         return await self.aclient(
             model=self.model,
-
+            max_tokens=4096,
             max_retries=5,
             messages=[
                 {
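
The adapter now passes the configured API key explicitly and pins instructor to Anthropic's tool-calling mode. A minimal sketch of that patched-client pattern, with an illustrative response model and placeholder key (not the adapter's actual call site):

    import anthropic
    import instructor
    from pydantic import BaseModel

    class Answer(BaseModel):
        text: str

    # Patch the async create call so it can return structured (Pydantic) output.
    create = instructor.patch(
        create=anthropic.AsyncAnthropic(api_key="sk-ant-...").messages.create,
        mode=instructor.Mode.ANTHROPIC_TOOLS,
    )

    # Later awaited roughly the way the adapter does:
    # answer = await create(model="claude-3-5-haiku-latest", max_tokens=4096, max_retries=5,
    #                       messages=[{"role": "user", "content": "..."}], response_model=Answer)
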
cognee/modules/notebooks/methods/create_tutorial_notebook.py
CHANGED

@@ -1,4 +1,3 @@
-
 from uuid import UUID, uuid4
 from sqlalchemy.ext.asyncio import AsyncSession
 

@@ -28,14 +27,14 @@ async def create_tutorial_notebook(user_id: UUID, session: AsyncSession):
         session=session,
     )
 
+
     cell_content = [
-        """
+        """
 # Using Cognee with Python Development Data
 
 Unite authoritative Python practice (Guido van Rossum's own contributions!), normative guidance (Zen/PEP 8), and your lived context (rules + conversations) into one *AI memory* that produces answers that are relevant, explainable, and consistent.
 """,
-
-        """
+        """
 ## What You'll Learn
 
 In this comprehensive tutorial, you'll discover how to transform scattered development data into an intelligent knowledge system that enhances your coding workflow. By the end, you'll have:

@@ -46,8 +45,7 @@ In this comprehensive tutorial, you'll discover how to transform scattered devel
 
 This tutorial demonstrates the power of **knowledge graphs** and **retrieval-augmented generation (RAG)** for software development, showing you how to build systems that learn from Python's creator and improve your own Python development.
 """,
-
-        """
+        """
 ## Cognee and its core operations
 
 Before we dive in, let's understand the core Cognee operations we'll be working with:

@@ -56,8 +54,7 @@ Before we dive in, let's understand the core Cognee operations we'll be working
 - `cognee.search()` - Queries the knowledge graph with natural language or Cypher
 - `cognee.memify()` - Cognee's \"secret sauce\" that infers implicit connections and rules from your data
 """,
-
-        """
+        """
 ## Data used in this tutorial
 
 Cognee can ingest many types of sources. In this tutorial, we use a small, concrete set of files that cover different perspectives:

@@ -67,8 +64,7 @@ Cognee can ingest many types of sources. In this tutorial, we use a small, concr
 - `my_developer_rules.md` — Local constraints. Your house rules, conventions, and project-specific requirements (scope, privacy, Spec.md). Keeps recommendations relevant to your actual workflow.
 - `copilot_conversations.json` — Personal history. Transcripts of real assistant conversations, including your questions, code snippets, and discussion topics. Captures "how you code" and connects it to "how Guido codes."
 """,
-
-        """
+        """
 # Preliminaries
 
 To strike the balanace between speed, cost, anc quality, we recommend using OpenAI's `4o-mini` model; make sure your `.env` file contains this line:

@@ -76,8 +72,7 @@ To strike the balanace between speed, cost, anc quality, we recommend using Open
 LLM_MODEL="gpt-4o-mini"
 `
 """,
-
-        """
+        """
 import cognee
 
 result = await cognee.add(

@@ -88,5 +83,5 @@ result = await cognee.add(
 await cognee.cognify(temporal_cognify=True)
 
 results = await cognee.search("Show me commits")
-"""
-    ]
+""",
+    ]
cognee/modules/search/methods/search.py
CHANGED

@@ -136,12 +136,19 @@ async def search(
     if os.getenv("ENABLE_BACKEND_ACCESS_CONTROL", "false").lower() == "true":
         return_value = []
         for search_result in search_results:
-
+            prepared_search_results = await prepare_search_result(search_result)
+
+            result = prepared_search_results["result"]
+            graphs = prepared_search_results["graphs"]
+            context = prepared_search_results["context"]
+            datasets = prepared_search_results["datasets"]
+
             return_value.append(
                 {
-                    "search_result": result,
+                    "search_result": [result] if result else None,
                     "dataset_id": datasets[0].id,
                     "dataset_name": datasets[0].name,
+                    "graphs": graphs,
                 }
             )
         return return_value

@@ -155,14 +162,6 @@ async def search(
         return return_value[0]
     else:
         return return_value
-    # return [
-    #     SearchResult(
-    #         search_result=result,
-    #         dataset_id=datasets[min(index, len(datasets) - 1)].id if datasets else None,
-    #         dataset_name=datasets[min(index, len(datasets) - 1)].name if datasets else None,
-    #     )
-    #     for index, (result, _, datasets) in enumerate(search_results)
-    # ]
 
 
 async def authorized_search(

@@ -208,11 +207,11 @@ async def authorized_search(
     context = {}
     datasets: List[Dataset] = []
 
-    for _, search_context,
-        for dataset in
+    for _, search_context, search_datasets in search_responses:
+        for dataset in search_datasets:
             context[str(dataset.id)] = search_context
 
-        datasets.extend(
+        datasets.extend(search_datasets)
 
     specific_search_tools = await get_search_type_tools(
         query_type=query_type,
cognee/modules/search/utils/prepare_search_result.py
CHANGED

@@ -1,8 +1,11 @@
 from typing import List, cast
+from uuid import uuid5, NAMESPACE_OID
 
 from cognee.modules.graph.utils import resolve_edges_to_text
 from cognee.modules.graph.cognee_graph.CogneeGraphElements import Edge
+from cognee.modules.search.types.SearchResult import SearchResultDataset
 from cognee.modules.search.utils.transform_context_to_graph import transform_context_to_graph
+from cognee.modules.search.utils.transform_insights_to_graph import transform_insights_to_graph
 
 
 async def prepare_search_result(search_result):

@@ -12,29 +15,48 @@ async def prepare_search_result(search_result):
     result_graph = None
     context_texts = {}
 
-    if isinstance(
+    if isinstance(datasets, list) and len(datasets) == 0:
+        datasets = [
+            SearchResultDataset(
+                id=uuid5(NAMESPACE_OID, "*"),
+                name="all available datasets",
+            )
+        ]
+
+    if (
+        isinstance(context, List)
+        and len(context) > 0
+        and isinstance(context[0], tuple)
+        and context[0][1].get("relationship_name")
+    ):
+        context_graph = transform_insights_to_graph(context)
+        graphs = {
+            ", ".join([dataset.name for dataset in datasets]): context_graph,
+        }
+        results = None
+    elif isinstance(context, List) and len(context) > 0 and isinstance(context[0], Edge):
         context_graph = transform_context_to_graph(context)
 
         graphs = {
-            "
+            ", ".join([dataset.name for dataset in datasets]): context_graph,
         }
         context_texts = {
-            "
+            ", ".join([dataset.name for dataset in datasets]): await resolve_edges_to_text(context),
         }
     elif isinstance(context, str):
         context_texts = {
-            "
+            ", ".join([dataset.name for dataset in datasets]): context,
         }
     elif isinstance(context, List) and len(context) > 0 and isinstance(context[0], str):
         context_texts = {
-            "
+            ", ".join([dataset.name for dataset in datasets]): "\n".join(cast(List[str], context)),
         }
 
     if isinstance(results, List) and len(results) > 0 and isinstance(results[0], Edge):
         result_graph = transform_context_to_graph(results)
 
     return {
-        "result": result_graph or results[0] if len(results) == 1 else results,
+        "result": result_graph or results[0] if results and len(results) == 1 else results,
         "graphs": graphs,
         "context": context_texts,
         "datasets": datasets,
cognee/modules/search/utils/transform_context_to_graph.py
CHANGED

@@ -14,7 +14,7 @@ def transform_context_to_graph(context: List[Edge]):
             if "name" in triplet.node1.attributes
             else triplet.node1.id,
             "type": triplet.node1.attributes["type"],
-            "attributes": triplet.
+            "attributes": triplet.node1.attributes,
         }
         nodes[triplet.node2.id] = {
             "id": triplet.node2.id,
cognee/modules/search/utils/transform_insights_to_graph.py
ADDED

@@ -0,0 +1,28 @@
+from typing import Dict, List, Tuple
+
+
+def transform_insights_to_graph(context: List[Tuple[Dict, Dict, Dict]]):
+    nodes = {}
+    edges = {}
+
+    for triplet in context:
+        nodes[triplet[0]["id"]] = {
+            "id": triplet[0]["id"],
+            "label": triplet[0]["name"] if "name" in triplet[0] else triplet[0]["id"],
+            "type": triplet[0]["type"],
+        }
+        nodes[triplet[2]["id"]] = {
+            "id": triplet[2]["id"],
+            "label": triplet[2]["name"] if "name" in triplet[2] else triplet[2]["id"],
+            "type": triplet[2]["type"],
+        }
+        edges[f"{triplet[0]['id']}_{triplet[1]['relationship_name']}_{triplet[2]['id']}"] = {
+            "source": triplet[0]["id"],
+            "target": triplet[2]["id"],
+            "label": triplet[1]["relationship_name"],
+        }
+
+    return {
+        "nodes": list(nodes.values()),
+        "edges": list(edges.values()),
+    }
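
The new helper flattens (node, relationship, node) insight triplets into the nodes/edges dict shape the search response exposes. A small usage sketch with a made-up triplet:

    from cognee.modules.search.utils.transform_insights_to_graph import transform_insights_to_graph

    # One hypothetical insight triplet: (source node dict, edge dict, target node dict).
    insights = [
        (
            {"id": "n1", "name": "Guido van Rossum", "type": "Person"},
            {"relationship_name": "created"},
            {"id": "n2", "name": "Python", "type": "Language"},
        )
    ]

    graph = transform_insights_to_graph(insights)
    # graph["nodes"] -> two node dicts with "id", "label", "type"
    # graph["edges"] -> [{"source": "n1", "target": "n2", "label": "created"}]
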
cognee/tasks/temporal_graph/models.py
CHANGED

@@ -3,12 +3,17 @@ from pydantic import BaseModel, Field
 
 
 class Timestamp(BaseModel):
-    year: int = Field(
-
-
-
-
-
+    year: int = Field(
+        ...,
+        ge=1,
+        le=9999,
+        description="Always required. If only a year is known, use it.",
+    )
+    month: int = Field(1, ge=1, le=12, description="If unknown, default to 1")
+    day: int = Field(1, ge=1, le=31, description="If unknown, default to 1")
+    hour: int = Field(0, ge=0, le=23, description="If unknown, default to 0")
+    minute: int = Field(0, ge=0, le=59, description="If unknown, default to 0")
+    second: int = Field(0, ge=0, le=59, description="If unknown, default to 0")
 
 
 class Interval(BaseModel):
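
With these Field constraints only the year is required; the remaining components fall back to the documented defaults and are range-checked. A quick sketch:

    from cognee.tasks.temporal_graph.models import Timestamp

    ts = Timestamp(year=1890)        # month/day default to 1, hour/minute/second to 0
    Timestamp(year=1890, month=13)   # raises pydantic.ValidationError (month must be <= 12)
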
cognee/tests/cli_tests/cli_unit_tests/test_cli_main.py
CHANGED

@@ -49,7 +49,7 @@ class TestCliMain:
     def test_main_no_command(self, mock_create_parser):
         """Test main function when no command is provided"""
         mock_parser = MagicMock()
-        mock_parser.parse_args.return_value = MagicMock(command=None)
+        mock_parser.parse_args.return_value = MagicMock(command=None, spec={})
         mock_create_parser.return_value = (mock_parser, {})
 
         result = main()

@@ -64,7 +64,7 @@ class TestCliMain:
         mock_command.execute.return_value = None
 
         mock_parser = MagicMock()
-        mock_args = MagicMock(command="test")
+        mock_args = MagicMock(command="test", spec={})
         mock_parser.parse_args.return_value = mock_args
 
         mock_create_parser.return_value = (mock_parser, {"test": mock_command})

@@ -84,7 +84,7 @@ class TestCliMain:
         mock_command.execute.side_effect = CliCommandException("Test error", error_code=2)
 
         mock_parser = MagicMock()
-        mock_args = MagicMock(command="test")
+        mock_args = MagicMock(command="test", spec={})
         mock_parser.parse_args.return_value = mock_args
 
         mock_create_parser.return_value = (mock_parser, {"test": mock_command})

@@ -103,7 +103,7 @@ class TestCliMain:
         mock_command.execute.side_effect = Exception("Generic error")
 
         mock_parser = MagicMock()
-        mock_args = MagicMock(command="test")
+        mock_args = MagicMock(command="test", spec={})
         mock_parser.parse_args.return_value = mock_args
 
         mock_create_parser.return_value = (mock_parser, {"test": mock_command})

@@ -126,7 +126,7 @@ class TestCliMain:
         mock_command.execute.side_effect = test_exception
 
         mock_parser = MagicMock()
-        mock_args = MagicMock(command="test")
+        mock_args = MagicMock(command="test", spec={})
         mock_parser.parse_args.return_value = mock_args
 
         mock_create_parser.return_value = (mock_parser, {"test": mock_command})
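
Every mocked argparse namespace now also gets `spec={}`, so any attribute the CLI reads that the test did not configure raises AttributeError instead of silently returning a fresh child mock. A tiny illustration of that behaviour (plain `unittest.mock`, not cognee-specific):

    from unittest.mock import MagicMock

    args = MagicMock(command="test", spec={})
    args.command   # "test" -- explicitly configured, so returned as-is
    args.verbose   # AttributeError: Mock object has no attribute 'verbose'
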
cognee/tests/test_temporal_graph.py
CHANGED

@@ -97,7 +97,7 @@ async def main():
         f"Expected exactly one DocumentChunk, but found {type_counts.get('DocumentChunk', 0)}"
     )
 
-    assert type_counts.get("Entity", 0) >=
+    assert type_counts.get("Entity", 0) >= 10, (
         f"Expected multiple entities (assert is set to 20), but found {type_counts.get('Entity', 0)}"
     )
 

@@ -105,52 +105,24 @@ async def main():
         f"Expected multiple entity types, but found {type_counts.get('EntityType', 0)}"
     )
 
-    assert type_counts.get("Event", 0) >=
+    assert type_counts.get("Event", 0) >= 10, (
         f"Expected multiple events (assert is set to 20), but found {type_counts.get('Event', 0)}"
     )
 
-    assert type_counts.get("Timestamp", 0) >=
-        f"Expected multiple timestamps (assert is set to
+    assert type_counts.get("Timestamp", 0) >= 10, (
+        f"Expected multiple timestamps (assert is set to 10), but found {type_counts.get('Timestamp', 0)}"
     )
 
-    assert
-        f"Expected multiple intervals, but found {type_counts.get('Interval', 0)}"
-    )
-
-    assert edge_type_counts.get("contains", 0) >= 20, (
+    assert edge_type_counts.get("contains", 0) >= 10, (
         f"Expected multiple 'contains' edge, but found {edge_type_counts.get('contains', 0)}"
     )
 
-    assert edge_type_counts.get("is_a", 0) >=
+    assert edge_type_counts.get("is_a", 0) >= 10, (
         f"Expected multiple 'is_a' edge, but found {edge_type_counts.get('is_a', 0)}"
     )
 
-    assert edge_type_counts.get("during", 0) == type_counts.get("Interval", 0), (
-        "Expected the same amount of during and interval objects in the graph"
-    )
-
-    assert edge_type_counts.get("during", 0) == type_counts.get("Interval", 0), (
-        "Expected the same amount of during and interval objects in the graph"
-    )
-
-    assert edge_type_counts.get("time_from", 0) == type_counts.get("Interval", 0), (
-        "Expected the same amount of time_from and interval objects in the graph"
-    )
-
-    assert edge_type_counts.get("time_to", 0) == type_counts.get("Interval", 0), (
-        "Expected the same amount of time_to and interval objects in the graph"
-    )
-
     retriever = TemporalRetriever()
 
-    result_before = await retriever.extract_time_from_query("What happened before 1890?")
-
-    assert result_before[0] is None
-
-    result_after = await retriever.extract_time_from_query("What happened after 1891?")
-
-    assert result_after[1] is None
-
     result_between = await retriever.extract_time_from_query("What happened between 1890 and 1900?")
 
     assert result_between[1]
{cognee-0.3.3.dist-info → cognee-0.3.4.dev0.dist-info}/METADATA
CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: cognee
-Version: 0.3.
+Version: 0.3.4.dev0
 Summary: Cognee - is a library for enriching LLM context with a semantic layer for better understanding and reasoning.
 Project-URL: Homepage, https://www.cognee.ai
 Project-URL: Repository, https://github.com/topoteretes/cognee

@@ -57,7 +57,7 @@ Requires-Dist: structlog<26,>=25.2.0
 Requires-Dist: tiktoken<1.0.0,>=0.8.0
 Requires-Dist: typing-extensions<5.0.0,>=4.12.2
 Provides-Extra: anthropic
-Requires-Dist: anthropic
+Requires-Dist: anthropic>=0.27; extra == 'anthropic'
 Provides-Extra: api
 Requires-Dist: gunicorn<24,>=20.1.0; extra == 'api'
 Requires-Dist: uvicorn<1.0.0,>=0.34.0; extra == 'api'

@@ -65,8 +65,8 @@ Requires-Dist: websockets<16.0.0,>=15.0.1; extra == 'api'
 Provides-Extra: aws
 Requires-Dist: s3fs[boto3]==2025.3.2; extra == 'aws'
 Provides-Extra: chromadb
-Requires-Dist: chromadb<0.7,>=0.
-Requires-Dist: pypika==0.48.
+Requires-Dist: chromadb<0.7,>=0.6; extra == 'chromadb'
+Requires-Dist: pypika==0.48.9; extra == 'chromadb'
 Provides-Extra: codegraph
 Requires-Dist: fastembed<=0.6.0; (python_version < '3.13') and extra == 'codegraph'
 Requires-Dist: transformers<5,>=4.46.3; extra == 'codegraph'

@@ -316,7 +316,7 @@ You can also cognify your files and query using cognee UI.
 
 <img src="assets/cognee-new-ui.webp" width="100%" alt="Cognee UI 2"></a>
 
-Try cognee UI by runnning ``` cognee -ui ``` command on your terminal.
+Try cognee UI by runnning ``` cognee-cli -ui ``` command on your terminal.
 
 ## Understand our architecture
 
{cognee-0.3.3.dist-info → cognee-0.3.4.dev0.dist-info}/RECORD
CHANGED

@@ -54,7 +54,7 @@ cognee/api/v1/responses/routers/__init__.py,sha256=X2qishwGRVFXawnvkZ5bv420PuPRL
 cognee/api/v1/responses/routers/default_tools.py,sha256=9qqzEZhrt3_YMKzUA06ke8P-2WeLXhYpKgVW6mLHlzw,3004
 cognee/api/v1/responses/routers/get_responses_router.py,sha256=ggbLhY9IXaInCgIs5TUuOCkFW64xmTKZQsc2ENq2Ocs,5979
 cognee/api/v1/search/__init__.py,sha256=Sqw60DcOj4Bnvt-EWFknT31sPcvROIRKCWLr5pbkFr4,39
-cognee/api/v1/search/search.py,sha256=
+cognee/api/v1/search/search.py,sha256=WhBtj90nW9ulas_dm8lX72VYGMmWVdcrC7nAfxcQgso,8821
 cognee/api/v1/search/routers/__init__.py,sha256=6RebeLX_2NTRxIMPH_mGuLztPxnGnMJK1y_O93CtRm8,49
 cognee/api/v1/search/routers/get_search_router.py,sha256=-5GLgHipflEblYAwl3uiPAZ2i3TgrLEjDuiO_cCqcB8,6252
 cognee/api/v1/settings/routers/__init__.py,sha256=wj_UYAXNMPCkn6Mo1YB01dCBiV9DQwTIf6OWjnGRpf8,53

@@ -193,12 +193,12 @@ cognee/infrastructure/databases/vector/supported_databases.py,sha256=0UIYcQ15p7-
 cognee/infrastructure/databases/vector/use_vector_adapter.py,sha256=ab2x6-sxVDu_tf4zWChN_ngqv8LaLYk2VCtBjZEyjaM,174
 cognee/infrastructure/databases/vector/utils.py,sha256=WHPSMFsN2XK72uURvCl_jlzQa-N3XKPhrDnB6GKmBtM,1224
 cognee/infrastructure/databases/vector/vector_db_interface.py,sha256=EUpRVyMyS0MOQwFEgxwRa_9MY1vYotCyO6CONM81r94,7118
-cognee/infrastructure/databases/vector/chromadb/ChromaDBAdapter.py,sha256=
+cognee/infrastructure/databases/vector/chromadb/ChromaDBAdapter.py,sha256=c8oREW4EcX_TL2i-JdCRsi5EOtPxrtxpYkaUzc8IolU,18775
 cognee/infrastructure/databases/vector/chromadb/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 cognee/infrastructure/databases/vector/embeddings/EmbeddingEngine.py,sha256=boNJ55dxJQ_ImW1_DDjToQa0Hos9mkeRYwfCI7UPLn0,983
 cognee/infrastructure/databases/vector/embeddings/FastembedEmbeddingEngine.py,sha256=_R3yIuDaMN2lz9JhMy6SNpZeeCRZxHA9hmSB3gOxKkA,3823
 cognee/infrastructure/databases/vector/embeddings/LiteLLMEmbeddingEngine.py,sha256=XUZnVftE57qWlAebr99aOEg-FynMKB7IS-kmBBT8E5Y,7544
-cognee/infrastructure/databases/vector/embeddings/OllamaEmbeddingEngine.py,sha256=
+cognee/infrastructure/databases/vector/embeddings/OllamaEmbeddingEngine.py,sha256=uR9ItOYN0ySsnPrmHGaoLGjiKJcFF-88KMwbtH6j0DU,4173
 cognee/infrastructure/databases/vector/embeddings/__init__.py,sha256=Akv-ShdXjHw-BE00Gw55GgGxIMr0SZ9FHi3RlpsJmiE,55
 cognee/infrastructure/databases/vector/embeddings/config.py,sha256=s9acnhn1DLFggCNJMVcN9AxruMf3J00O_R--JVGqMNs,2221
 cognee/infrastructure/databases/vector/embeddings/embedding_rate_limiter.py,sha256=TyCoo_SipQ6JNy5eqXY2shrZnhb2JVjt9xOsJltOCdw,17598

@@ -330,7 +330,7 @@ cognee/infrastructure/llm/structured_output_framework/litellm_instructor/llm/get
 cognee/infrastructure/llm/structured_output_framework/litellm_instructor/llm/llm_interface.py,sha256=126jfQhTEAbmsVsc4wyf20dK-C2AFJQ0sVmNPZFEet0,2194
 cognee/infrastructure/llm/structured_output_framework/litellm_instructor/llm/rate_limiter.py,sha256=ie_zMYnUzMcW4okP4P41mEC31EML2ztdU7bEQQdg99U,16763
 cognee/infrastructure/llm/structured_output_framework/litellm_instructor/llm/anthropic/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-cognee/infrastructure/llm/structured_output_framework/litellm_instructor/llm/anthropic/adapter.py,sha256=
+cognee/infrastructure/llm/structured_output_framework/litellm_instructor/llm/anthropic/adapter.py,sha256=8KTFmFm9uLagIDTSsZMYjuyhXtmFkbm-YMWVDhrn7qw,3249
 cognee/infrastructure/llm/structured_output_framework/litellm_instructor/llm/gemini/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 cognee/infrastructure/llm/structured_output_framework/litellm_instructor/llm/gemini/adapter.py,sha256=maSHU7nEZiR68ZeZW896LhXPm9b1f0rmEYQ6kB4CZMM,5089
 cognee/infrastructure/llm/structured_output_framework/litellm_instructor/llm/generic_llm_api/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0

@@ -482,7 +482,7 @@ cognee/modules/metrics/operations/__init__.py,sha256=MZ3xbVdfEKqfLct8WnbyFVyZmkB
 cognee/modules/metrics/operations/get_pipeline_run_metrics.py,sha256=upIWnzKeJT1_XbL_ABdGxW-Ai7mO3AqMK35BNmItIQQ,2434
 cognee/modules/notebooks/methods/__init__.py,sha256=IhY4fUVPJbuvS83QESsWzjZRC6oC1I-kJi5gr3kPTLk,215
 cognee/modules/notebooks/methods/create_notebook.py,sha256=S41H3Rha0pj9dEKFy1nBG9atTGHhUdOmDZgr0ckUA6M,633
-cognee/modules/notebooks/methods/create_tutorial_notebook.py,sha256=
+cognee/modules/notebooks/methods/create_tutorial_notebook.py,sha256=ZoGilQU993M0j3fFjBicOSsF5TFEq_k8tjbD_90sI7g,4269
 cognee/modules/notebooks/methods/delete_notebook.py,sha256=BKxoRlPzkwXvTYh5WcF-zo_iVmaXqEiptS42JwB0KQU,309
 cognee/modules/notebooks/methods/get_notebook.py,sha256=IP4imsdt9X6GYd6i6WF6PlVhotGNH0i7XZpPqbtqMwo,554
 cognee/modules/notebooks/methods/get_notebooks.py,sha256=ee40ALHvebVORuwZVkQ271qAj260rrYy6eVGxAmfo8c,483

@@ -579,7 +579,7 @@ cognee/modules/search/exceptions/exceptions.py,sha256=Zc5Y0M-r-UnSSlpKzHKBplfjZ-
 cognee/modules/search/methods/__init__.py,sha256=jGfRvNwM5yIzj025gaVhcx7nCupRSXbUUnFjYVjL_Js,27
 cognee/modules/search/methods/get_search_type_tools.py,sha256=wXxOZx3uEnMhRhUO2HGswQ5iVbWvjUj17UT_qdJg6Oo,6837
 cognee/modules/search/methods/no_access_control_search.py,sha256=R08aMgaB8AkD0_XVaX15qLyC9KJ3fSVFv9zeZwuyez4,1566
-cognee/modules/search/methods/search.py,sha256=
+cognee/modules/search/methods/search.py,sha256=JjB9Nhxt_AIDF24z81FWGm7VVJFW90RCXRAU9VhMG34,12430
 cognee/modules/search/models/Query.py,sha256=9WcF5Z1oCFtA4O-7An37eNAPX3iyygO4B5NSwhx7iIg,558
 cognee/modules/search/models/Result.py,sha256=U7QtoNzAtZnUDwGWhjVfcalHQd4daKtYYvJz2BeWQ4w,564
 cognee/modules/search/operations/__init__.py,sha256=AwJl6v9BTpocoefEZLk-flo1EtydYb46NSUoNFHkhX0,156

@@ -593,8 +593,9 @@ cognee/modules/search/types/SearchResult.py,sha256=blEean6PRFKcDRQugsojZPfH-Wohx
 cognee/modules/search/types/SearchType.py,sha256=-lT4bLKKunV4cL4FfF3tjNbdN7X4AsRMLpTkReNwXZM,594
 cognee/modules/search/types/__init__.py,sha256=8k6OjVrL70W1Jh-ClTbG2ETYIhOtSk3tfqjzYgEdPzA,117
 cognee/modules/search/utils/__init__.py,sha256=86mRtCN-B5-2NNChdQoU5x8_8hqTczGZjBoKVE9O7hA,124
-cognee/modules/search/utils/prepare_search_result.py,sha256=
-cognee/modules/search/utils/transform_context_to_graph.py,sha256=
+cognee/modules/search/utils/prepare_search_result.py,sha256=I_NrC6G549mEm1f0JZYJLCxAYQbKXBIzTJB4kv_3538,2334
+cognee/modules/search/utils/transform_context_to_graph.py,sha256=Wl0kZR6YqyBxY-vBNNIy2pPIZaJVCigcRveJWjSX8BA,1238
+cognee/modules/search/utils/transform_insights_to_graph.py,sha256=_ID5-37Ppl7jHbxNkUioZyH_I8SGXnhbfeLHgfEYec8,925
 cognee/modules/settings/__init__.py,sha256=_SZQgCQnnnIHLJuKOMO9uWzXNBQxwYHHMUSBp0qa2uQ,210
 cognee/modules/settings/get_current_settings.py,sha256=R2lOusG5Q2PMa2-2vDndh3Lm7nXyZVkdzTV7vQHT81Y,1642
 cognee/modules/settings/get_settings.py,sha256=qkpNB_-IRexSzaiVvSS7NXG3S3fpbhDb6BQIPGAKET4,4221

@@ -762,7 +763,7 @@ cognee/tasks/temporal_graph/add_entities_to_event.py,sha256=wH4TlJfGN5_tjouuSFKK
 cognee/tasks/temporal_graph/enrich_events.py,sha256=aLwGKzKLdUXbdn4WGN1uK5vOBk8nPzGM6bJ-7lWkt6s,1097
 cognee/tasks/temporal_graph/extract_events_and_entities.py,sha256=iL0ppf5zmTey67yncLPkDY0Fd2GL4CqDGV4v1L0VmoA,1301
 cognee/tasks/temporal_graph/extract_knowledge_graph_from_events.py,sha256=biDjIOnL_6ZSifFokwAlhVqNUixuzoFdYUmPzAT9d1Y,1440
-cognee/tasks/temporal_graph/models.py,sha256=
+cognee/tasks/temporal_graph/models.py,sha256=2fBZWqfZfLNh5BHqU8RbW60R1_IZU3PgY8MZJHlF0S0,1390
 cognee/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 cognee/tests/test_chromadb.py,sha256=D9JEN0xbFxNLgp8UJTVAjpwob9S-LOQC-hSaMVvYhR8,9240
 cognee/tests/test_cognee_server_start.py,sha256=kcIbzu72ZZUlPZ51c_DpSCCwx3X9mNvYZrVcxHfZaJs,4226

@@ -792,13 +793,13 @@ cognee/tests/test_s3_file_storage.py,sha256=62tvIFyh_uTP0TFF9Ck4Y-sxWPW-cwJKYEJU
 cognee/tests/test_search_db.py,sha256=4GpLx8ZJoMjkp-XqQ-LCrkf3NhAM4j_rMmlOFgmDO-A,13420
 cognee/tests/test_starter_pipelines.py,sha256=X1J8RDD0bFMKnRETyi5nyaF4TYdmUIu0EuD3WQwShNs,2475
 cognee/tests/test_telemetry.py,sha256=FIneuVofSKWFYqxNC88sT_P5GPzgfjVyqDCf2TYBE2E,4130
-cognee/tests/test_temporal_graph.py,sha256=
+cognee/tests/test_temporal_graph.py,sha256=GRYS2FsFybYOuoQvmG711UTVAHgvGvapgMEzW4sclZg,11551
 cognee/tests/cli_tests/cli_integration_tests/__init__.py,sha256=xYkvpZkxv_HRWmX71pGM3NUw2KKkDQIM-V6Ehxu-f0I,39
 cognee/tests/cli_tests/cli_integration_tests/test_cli_integration.py,sha256=3hdz1DoGeidJInqbCy1YQte6J0QeQG1_WKGs9utjAFg,11560
 cognee/tests/cli_tests/cli_unit_tests/__init__.py,sha256=U069aFvdwfKPd6YsR_FJML5LRphHHF5wx9mwug1hRh4,32
 cognee/tests/cli_tests/cli_unit_tests/test_cli_commands.py,sha256=5a3vPiSFmKumq6sTfdfMyeUpJGjbZ6_5zX4TUcV0ZJQ,17625
 cognee/tests/cli_tests/cli_unit_tests/test_cli_edge_cases.py,sha256=PyFCnClvbXG1GaiS16qwcuyXXDJ4sRyBCKV5WHrOUxk,23501
-cognee/tests/cli_tests/cli_unit_tests/test_cli_main.py,sha256=
+cognee/tests/cli_tests/cli_unit_tests/test_cli_main.py,sha256=6tx2A4us8uyZ7Zk4wZXplqLn5MtAejxOrG5ZxZpbFvQ,6143
 cognee/tests/cli_tests/cli_unit_tests/test_cli_runner.py,sha256=WZ8oZIlc_JintDq_cnEg9tmLEMZMGFPQGhU7Y_7sfgs,1497
 cognee/tests/cli_tests/cli_unit_tests/test_cli_utils.py,sha256=Flej8LNYRXNkWd2tq8elMm8MkqbhCUb8RtXaPzfNYm4,4323
 cognee/tests/integration/documents/AudioDocument_test.py,sha256=0mJnlWRc7gWqOxAUfdSSIxntcUrzkPXhlsd-MFsiRoM,2790

@@ -889,9 +890,9 @@ distributed/tasks/queued_add_edges.py,sha256=kz1DHE05y-kNHORQJjYWHUi6Q1QWUp_v3Dl
 distributed/tasks/queued_add_nodes.py,sha256=aqK4Ij--ADwUWknxYpiwbYrpa6CcvFfqHWbUZW4Kh3A,452
 distributed/workers/data_point_saving_worker.py,sha256=jFmA0-P_0Ru2IUDrSug0wML-5goAKrGtlBm5BA5Ryw4,3229
 distributed/workers/graph_saving_worker.py,sha256=oUYl99CdhlrPAIsUOHbHnS3d4XhGoV0_OIbCO8wYzRg,3648
-cognee-0.3.
-cognee-0.3.
-cognee-0.3.
-cognee-0.3.
-cognee-0.3.
-cognee-0.3.
+cognee-0.3.4.dev0.dist-info/METADATA,sha256=T3Pt0L4t3GKyziXuy4n1Kdlh3OUfPt4pPh4VDrjVkPY,14752
+cognee-0.3.4.dev0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+cognee-0.3.4.dev0.dist-info/entry_points.txt,sha256=GCCTsNg8gzOJkolq7dR7OK1VlIAO202dGDnMI8nm8oQ,55
+cognee-0.3.4.dev0.dist-info/licenses/LICENSE,sha256=pHHjSQj1DD8SDppW88MMs04TPk7eAanL1c5xj8NY7NQ,11344
+cognee-0.3.4.dev0.dist-info/licenses/NOTICE.md,sha256=6L3saP3kSpcingOxDh-SGjMS8GY79Rlh2dBNLaO0o5c,339
+cognee-0.3.4.dev0.dist-info/RECORD,,
{cognee-0.3.3.dist-info → cognee-0.3.4.dev0.dist-info}/WHEEL
File without changes

{cognee-0.3.3.dist-info → cognee-0.3.4.dev0.dist-info}/entry_points.txt
File without changes

{cognee-0.3.3.dist-info → cognee-0.3.4.dev0.dist-info}/licenses/LICENSE
File without changes

{cognee-0.3.3.dist-info → cognee-0.3.4.dev0.dist-info}/licenses/NOTICE.md
File without changes