cognee 0.2.3.dev0__py3-none-any.whl → 0.2.3.dev1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- cognee/api/v1/add/add.py +0 -5
- cognee/api/v1/cognify/cognify.py +0 -8
- cognee/api/v1/config/config.py +5 -13
- cognee/api/v1/datasets/routers/get_datasets_router.py +1 -1
- cognee/api/v1/delete/delete.py +1 -1
- cognee/api/v1/exceptions/__init__.py +13 -0
- cognee/api/v1/{delete → exceptions}/exceptions.py +15 -12
- cognee/api/v1/search/search.py +0 -7
- cognee/exceptions/__init__.py +5 -5
- cognee/exceptions/exceptions.py +37 -17
- cognee/infrastructure/data/exceptions/__init__.py +7 -0
- cognee/infrastructure/data/exceptions/exceptions.py +22 -0
- cognee/infrastructure/data/utils/extract_keywords.py +3 -3
- cognee/infrastructure/databases/exceptions/__init__.py +3 -0
- cognee/infrastructure/databases/exceptions/exceptions.py +57 -9
- cognee/infrastructure/databases/graph/neptune_driver/exceptions.py +15 -10
- cognee/infrastructure/databases/hybrid/falkordb/FalkorDBAdapter.py +2 -2
- cognee/infrastructure/databases/hybrid/neptune_analytics/NeptuneAnalyticsAdapter.py +4 -5
- cognee/infrastructure/databases/vector/chromadb/ChromaDBAdapter.py +2 -2
- cognee/infrastructure/databases/vector/embeddings/LiteLLMEmbeddingEngine.py +1 -1
- cognee/infrastructure/databases/vector/exceptions/exceptions.py +3 -3
- cognee/infrastructure/databases/vector/lancedb/LanceDBAdapter.py +2 -2
- cognee/infrastructure/databases/vector/pgvector/PGVectorAdapter.py +4 -3
- cognee/infrastructure/llm/exceptions.py +30 -2
- cognee/infrastructure/llm/structured_output_framework/litellm_instructor/llm/anthropic/adapter.py +2 -2
- cognee/infrastructure/llm/structured_output_framework/litellm_instructor/llm/gemini/adapter.py +3 -3
- cognee/infrastructure/llm/structured_output_framework/litellm_instructor/llm/get_llm_client.py +10 -7
- cognee/infrastructure/llm/structured_output_framework/litellm_instructor/llm/openai/adapter.py +6 -4
- cognee/modules/data/exceptions/exceptions.py +18 -5
- cognee/modules/data/methods/delete_data.py +2 -4
- cognee/modules/data/processing/document_types/exceptions/exceptions.py +2 -2
- cognee/modules/graph/cognee_graph/CogneeGraph.py +6 -4
- cognee/modules/graph/cognee_graph/CogneeGraphElements.py +5 -10
- cognee/modules/graph/exceptions/__init__.py +2 -0
- cognee/modules/graph/exceptions/exceptions.py +25 -3
- cognee/modules/ingestion/exceptions/exceptions.py +2 -2
- cognee/modules/ontology/exceptions/exceptions.py +4 -4
- cognee/modules/pipelines/exceptions/exceptions.py +2 -2
- cognee/modules/retrieval/exceptions/exceptions.py +12 -6
- cognee/modules/search/exceptions/__init__.py +7 -0
- cognee/modules/search/exceptions/exceptions.py +15 -0
- cognee/modules/search/methods/search.py +2 -3
- cognee/modules/users/exceptions/exceptions.py +6 -6
- cognee/shared/exceptions/exceptions.py +2 -2
- cognee/tasks/completion/exceptions/exceptions.py +3 -3
- cognee/tasks/documents/classify_documents.py +4 -0
- cognee/tasks/documents/exceptions/__init__.py +11 -0
- cognee/tasks/documents/exceptions/exceptions.py +36 -0
- cognee/tasks/documents/extract_chunks_from_documents.py +8 -2
- cognee/tasks/graph/exceptions/__init__.py +12 -0
- cognee/tasks/graph/exceptions/exceptions.py +41 -0
- cognee/tasks/graph/extract_graph_from_data.py +28 -0
- cognee/tasks/ingestion/exceptions/__init__.py +8 -0
- cognee/tasks/ingestion/exceptions/exceptions.py +12 -0
- cognee/tasks/ingestion/resolve_data_directories.py +5 -0
- cognee/tasks/storage/add_data_points.py +8 -0
- cognee/tasks/storage/exceptions/__init__.py +9 -0
- cognee/tasks/storage/exceptions/exceptions.py +13 -0
- cognee/tasks/storage/index_data_points.py +1 -1
- cognee/tasks/summarization/exceptions/__init__.py +9 -0
- cognee/tasks/summarization/exceptions/exceptions.py +14 -0
- cognee/tasks/summarization/summarize_text.py +8 -1
- cognee/tests/test_delete_by_id.py +1 -1
- cognee/tests/unit/modules/graph/cognee_graph_elements_test.py +5 -5
- cognee/tests/unit/modules/search/search_methods_test.py +2 -2
- {cognee-0.2.3.dev0.dist-info → cognee-0.2.3.dev1.dist-info}/METADATA +1 -1
- {cognee-0.2.3.dev0.dist-info → cognee-0.2.3.dev1.dist-info}/RECORD +70 -56
- cognee/infrastructure/databases/exceptions/EmbeddingException.py +0 -20
- {cognee-0.2.3.dev0.dist-info → cognee-0.2.3.dev1.dist-info}/WHEEL +0 -0
- {cognee-0.2.3.dev0.dist-info → cognee-0.2.3.dev1.dist-info}/licenses/LICENSE +0 -0
- {cognee-0.2.3.dev0.dist-info → cognee-0.2.3.dev1.dist-info}/licenses/NOTICE.md +0 -0
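
The per-file diffs below share one theme: module-level exceptions are consolidated under shared bases exported from `cognee.exceptions` (`CogneeValidationError`, `CogneeConfigurationError`, `CogneeSystemError`), each instance carrying a message, a name, and an HTTP status code, while task entry points gain fail-fast input validation that raises these typed errors. A minimal sketch of that pattern, with a stand-in base class (only the `(message, name, status_code)` shape and the subclass shown are taken from the diff):

```python
from fastapi import status


class CogneeError(Exception):
    """Stand-in for cognee's exception bases: each error carries a
    human-readable message, a stable name, and an HTTP status code."""

    def __init__(self, message: str, name: str, status_code: int):
        self.message = message
        self.name = name
        self.status_code = status_code
        super().__init__(message)


class CogneeValidationError(CogneeError):
    """Stand-in; the real base lives in cognee.exceptions."""


class InvalidChunkSizeError(CogneeValidationError):
    # Body copied from the new cognee/tasks/documents/exceptions/exceptions.py.
    def __init__(self, value):
        super().__init__(
            message=f"max_chunk_size must be a positive integer (got {value}).",
            name="InvalidChunkSizeError",
            status_code=status.HTTP_400_BAD_REQUEST,
        )


try:
    raise InvalidChunkSizeError(0)
except CogneeError as err:
    # An API layer can map any such error onto an HTTP response uniformly.
    print(err.name, err.status_code, err.message)
```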
cognee/tasks/documents/classify_documents.py

```diff
@@ -10,6 +10,7 @@ from cognee.modules.data.processing.document_types import (
 )
 from cognee.modules.engine.models.node_set import NodeSet
 from cognee.modules.engine.utils.generate_node_id import generate_node_id
+from cognee.tasks.documents.exceptions import WrongDataDocumentInputError
 
 EXTENSION_TO_DOCUMENT_CLASS = {
     "pdf": PdfDocument,  # Text documents
@@ -111,6 +112,9 @@ async def classify_documents(data_documents: list[Data]) -> list[Document]:
         - list[Document]: A list of Document objects created based on the classified data
         documents.
     """
+    if not isinstance(data_documents, list):
+        raise WrongDataDocumentInputError("data_documents")
+
     documents = []
     for data_item in data_documents:
         document = EXTENSION_TO_DOCUMENT_CLASS[data_item.extension](
```
cognee/tasks/documents/exceptions/exceptions.py (new)

```diff
@@ -0,0 +1,36 @@
+from cognee.exceptions import (
+    CogneeValidationError,
+    CogneeConfigurationError,
+)
+from fastapi import status
+
+
+class WrongDataDocumentInputError(CogneeValidationError):
+    """Raised when a wrong data document is provided."""
+
+    def __init__(
+        self,
+        field: str,
+        name: str = "WrongDataDocumentInputError",
+        status_code: int = status.HTTP_422_UNPROCESSABLE_ENTITY,
+    ):
+        message = f"Missing of invalid parameter: '{field}'."
+        super().__init__(message, name, status_code)
+
+
+class InvalidChunkSizeError(CogneeValidationError):
+    def __init__(self, value):
+        super().__init__(
+            message=f"max_chunk_size must be a positive integer (got {value}).",
+            name="InvalidChunkSizeError",
+            status_code=status.HTTP_400_BAD_REQUEST,
+        )
+
+
+class InvalidChunkerError(CogneeValidationError):
+    def __init__(self):
+        super().__init__(
+            message="chunker must be a valid Chunker class.",
+            name="InvalidChunkerError",
+            status_code=status.HTTP_400_BAD_REQUEST,
+        )
```
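
Read together, these two files mean a caller who passes something other than a list to `classify_documents` now gets an immediate 422-style validation error instead of a later, harder-to-trace failure. A stand-alone sketch of just that guard, using a stand-in exception rather than the real import:

```python
class WrongDataDocumentInputError(Exception):
    """Stand-in for cognee.tasks.documents.exceptions.WrongDataDocumentInputError."""

    def __init__(self, field: str):
        super().__init__(f"Missing or invalid parameter: '{field}'.")


def classify_documents_guard(data_documents):
    # Mirrors the check now at the top of classify_documents().
    if not isinstance(data_documents, list):
        raise WrongDataDocumentInputError("data_documents")


classify_documents_guard([])            # a list (even an empty one) passes
try:
    classify_documents_guard("a.pdf")   # a bare string is rejected up front
except WrongDataDocumentInputError as err:
    print(err)
```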
cognee/tasks/documents/extract_chunks_from_documents.py

```diff
@@ -8,6 +8,7 @@ from cognee.modules.data.models import Data
 from cognee.infrastructure.databases.relational import get_relational_engine
 from cognee.modules.chunking.TextChunker import TextChunker
 from cognee.modules.chunking.Chunker import Chunker
+from cognee.tasks.documents.exceptions import InvalidChunkSizeError, InvalidChunkerError
 
 
 async def update_document_token_count(document_id: UUID, token_count: int) -> None:
@@ -37,6 +38,13 @@ async def extract_chunks_from_documents(
     - The `read` method of the `Document` class must be implemented to support the chunking operation.
     - The `chunker` parameter determines the chunking logic and should align with the document type.
     """
+    if not isinstance(max_chunk_size, int) or max_chunk_size <= 0:
+        raise InvalidChunkSizeError(max_chunk_size)
+    if not isinstance(chunker, type):
+        raise InvalidChunkerError()
+    if not hasattr(chunker, "read"):
+        raise InvalidChunkerError()
+
     for document in documents:
         document_token_count = 0
 
@@ -48,5 +56,3 @@ async def extract_chunks_from_documents(
         yield document_chunk
 
     await update_document_token_count(document.id, document_token_count)
-
-# todo rita
```
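
Note that both chunker guards inspect the class object itself, not an instance: `chunker` must be a `type`, and `hasattr(chunker, "read")` passes because methods are attributes of the class. A sketch of what the checks accept, where `MyChunker` is a hypothetical stand-in for a `Chunker` subclass:

```python
class MyChunker:
    """Hypothetical chunker; only the presence of read() matters to the guard."""

    def read(self):
        # The guard checks only that this attribute exists on the class.
        yield from ()


def validate_chunker(chunker):
    # Mirrors the two new guards; the real code raises InvalidChunkerError.
    if not isinstance(chunker, type):
        raise TypeError("chunker must be a class, not an instance")
    if not hasattr(chunker, "read"):
        raise TypeError("chunker must define a read method")


validate_chunker(MyChunker)       # passes: a class that defines read()
# validate_chunker(MyChunker())   # would fail: an instance, not a class
```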
cognee/tasks/graph/exceptions/__init__.py (new)

```diff
@@ -0,0 +1,12 @@
+"""
+Custom exceptions for the Cognee API.
+
+This module defines a set of exceptions for handling various data errors
+"""
+
+from .exceptions import (
+    InvalidDataChunksError,
+    InvalidGraphModelError,
+    InvalidOntologyAdapterError,
+    InvalidChunkGraphInputError,
+)
```
cognee/tasks/graph/exceptions/exceptions.py (new)

```diff
@@ -0,0 +1,41 @@
+from cognee.exceptions import (
+    CogneeValidationError,
+    CogneeConfigurationError,
+)
+from fastapi import status
+
+
+class InvalidDataChunksError(CogneeValidationError):
+    def __init__(self, detail: str):
+        super().__init__(
+            message=f"Invalid data_chunks: {detail}",
+            name="InvalidDataChunksError",
+            status_code=status.HTTP_400_BAD_REQUEST,
+        )
+
+
+class InvalidGraphModelError(CogneeValidationError):
+    def __init__(self, got):
+        super().__init__(
+            message=f"graph_model must be a subclass of BaseModel (got {got}).",
+            name="InvalidGraphModelError",
+            status_code=status.HTTP_400_BAD_REQUEST,
+        )
+
+
+class InvalidOntologyAdapterError(CogneeConfigurationError):
+    def __init__(self, got):
+        super().__init__(
+            message=f"ontology_adapter lacks required interface (got {got}).",
+            name="InvalidOntologyAdapterError",
+            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
+        )
+
+
+class InvalidChunkGraphInputError(CogneeValidationError):
+    def __init__(self, detail: str):
+        super().__init__(
+            message=f"Invalid chunk inputs or LLM Chunkgraphs: {detail}",
+            name="InvalidChunkGraphInputError",
+            status_code=status.HTTP_400_BAD_REQUEST,
+        )
```
cognee/tasks/graph/extract_graph_from_data.py

```diff
@@ -12,6 +12,12 @@ from cognee.modules.graph.utils import (
 )
 from cognee.shared.data_models import KnowledgeGraph
 from cognee.infrastructure.llm.LLMGateway import LLMGateway
+from cognee.tasks.graph.exceptions import (
+    InvalidGraphModelError,
+    InvalidDataChunksError,
+    InvalidChunkGraphInputError,
+    InvalidOntologyAdapterError,
+)
 
 
 async def integrate_chunk_graphs(
@@ -21,6 +27,20 @@
     ontology_adapter: OntologyResolver,
 ) -> List[DocumentChunk]:
     """Updates DocumentChunk objects, integrates data points and edges into databases."""
+
+    if not isinstance(data_chunks, list) or not isinstance(chunk_graphs, list):
+        raise InvalidChunkGraphInputError("data_chunks and chunk_graphs must be lists.")
+    if len(data_chunks) != len(chunk_graphs):
+        raise InvalidChunkGraphInputError(
+            f"length mismatch: {len(data_chunks)} chunks vs {len(chunk_graphs)} graphs."
+        )
+    if not isinstance(graph_model, type) or not issubclass(graph_model, BaseModel):
+        raise InvalidGraphModelError(graph_model)
+    if ontology_adapter is None or not hasattr(ontology_adapter, "get_subgraph"):
+        raise InvalidOntologyAdapterError(
+            type(ontology_adapter).__name__ if ontology_adapter else "None"
+        )
+
     graph_engine = await get_graph_engine()
 
     if graph_model is not KnowledgeGraph:
@@ -55,6 +75,14 @@ async def extract_graph_from_data(
     """
     Extracts and integrates a knowledge graph from the text content of document chunks using a specified graph model.
     """
+
+    if not isinstance(data_chunks, list) or not data_chunks:
+        raise InvalidDataChunksError("must be a non-empty list of DocumentChunk.")
+    if not all(hasattr(c, "text") for c in data_chunks):
+        raise InvalidDataChunksError("each chunk must have a 'text' attribute")
+    if not isinstance(graph_model, type) or not issubclass(graph_model, BaseModel):
+        raise InvalidGraphModelError(graph_model)
+
     chunk_graphs = await asyncio.gather(
         *[LLMGateway.extract_content_graph(chunk.text, graph_model) for chunk in data_chunks]
     )
```
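
In both guards the order of the two graph-model checks matters: `issubclass()` raises `TypeError` when its first argument is not a class, so `isinstance(graph_model, type)` must short-circuit first. A runnable illustration with pydantic (`DemoGraph` is a hypothetical model, not cognee's `KnowledgeGraph`):

```python
from pydantic import BaseModel


class DemoGraph(BaseModel):
    name: str = "demo"


def is_valid_graph_model(graph_model) -> bool:
    # Same shape as the new guard: the isinstance check must come first,
    # because issubclass() raises TypeError on non-class arguments.
    return isinstance(graph_model, type) and issubclass(graph_model, BaseModel)


print(is_valid_graph_model(DemoGraph))    # True: a BaseModel subclass
print(is_valid_graph_model(DemoGraph()))  # False: an instance, not a class
print(is_valid_graph_model(int))          # False: a class, but not a BaseModel
# issubclass(DemoGraph(), BaseModel) on its own would raise TypeError.
```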
cognee/tasks/ingestion/exceptions/exceptions.py (new)

```diff
@@ -0,0 +1,12 @@
+from cognee.exceptions import CogneeSystemError
+from fastapi import status
+
+
+class S3FileSystemNotFoundError(CogneeSystemError):
+    def __init__(
+        self,
+        name: str = "S3FileSystemNotFoundError",
+        status_code: int = status.HTTP_500_INTERNAL_SERVER_ERROR,
+    ):
+        message = "Could not find S3FileSystem."
+        super().__init__(message, name, status_code)
```
cognee/tasks/ingestion/resolve_data_directories.py

```diff
@@ -1,6 +1,9 @@
 import os
 from urllib.parse import urlparse
 from typing import List, Union, BinaryIO
+
+from cognee.tasks.ingestion.exceptions import S3FileSystemNotFoundError
+from cognee.exceptions import CogneeSystemError
 from cognee.infrastructure.files.storage.s3_config import get_s3_config
 
 
@@ -54,6 +57,8 @@ async def resolve_data_directories(
                 else:
                     s3_files.append(key)
                 resolved_data.extend(s3_files)
+            else:
+                raise S3FileSystemNotFoundError()
 
         elif os.path.isdir(item):  # If it's a directory
             if include_subdirectories:
```
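
Behaviorally, the new `else` branch turns a silent no-op into an explicit failure: an `s3://` item encountered without a configured S3 filesystem previously produced no resolved files, and now raises `S3FileSystemNotFoundError` (a `CogneeSystemError`, i.e. a 500-class error). A rough sketch of that control flow; everything except the exception message is a stand-in:

```python
from urllib.parse import urlparse


class S3FileSystemNotFoundError(Exception):
    def __init__(self):
        super().__init__("Could not find S3FileSystem.")


def resolve_item(item: str, s3_filesystem=None) -> list:
    if urlparse(item).scheme == "s3":
        if s3_filesystem is not None:
            return s3_filesystem.ls(item)  # hypothetical listing call
        raise S3FileSystemNotFoundError()
    return [item]


print(resolve_item("/tmp/data.txt"))   # local paths are unaffected
# resolve_item("s3://bucket/key")      # would now raise S3FileSystemNotFoundError
```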
cognee/tasks/storage/add_data_points.py

```diff
@@ -5,9 +5,17 @@ from cognee.infrastructure.databases.graph import get_graph_engine
 from cognee.modules.graph.utils import deduplicate_nodes_and_edges, get_graph_from_model
 from .index_data_points import index_data_points
 from .index_graph_edges import index_graph_edges
+from cognee.tasks.storage.exceptions import (
+    InvalidDataPointsInAddDataPointsError,
+)
 
 
 async def add_data_points(data_points: List[DataPoint]) -> List[DataPoint]:
+    if not isinstance(data_points, list):
+        raise InvalidDataPointsInAddDataPointsError("data_points must be a list.")
+    if not all(isinstance(dp, DataPoint) for dp in data_points):
+        raise InvalidDataPointsInAddDataPointsError("data_points: each item must be a DataPoint.")
+
     nodes = []
     edges = []
 
```
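
One subtlety of this guard: `all()` over an empty list is vacuously `True`, so `add_data_points([])` passes validation and simply has nothing to index; only a non-list, or a list containing a non-`DataPoint`, is rejected. A stand-in demonstration (`TypeError` substitutes for `InvalidDataPointsInAddDataPointsError`):

```python
class DataPoint:
    """Stand-in for cognee.infrastructure.engine.DataPoint."""


def validate_data_points(data_points):
    # Mirrors the new guard in add_data_points().
    if not isinstance(data_points, list):
        raise TypeError("data_points must be a list.")
    if not all(isinstance(dp, DataPoint) for dp in data_points):
        raise TypeError("data_points: each item must be a DataPoint.")


validate_data_points([])              # passes: all() over [] is vacuously True
validate_data_points([DataPoint()])   # passes
# validate_data_points([DataPoint(), 42])  # would raise: 42 is not a DataPoint
```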
cognee/tasks/storage/exceptions/exceptions.py (new)

```diff
@@ -0,0 +1,13 @@
+from cognee.exceptions import (
+    CogneeValidationError,
+)
+from fastapi import status
+
+
+class InvalidDataPointsInAddDataPointsError(CogneeValidationError):
+    def __init__(self, detail: str):
+        super().__init__(
+            message=f"Invalid data_points: {detail}",
+            name="InvalidDataPointsInAddDataPointsError",
+            status_code=status.HTTP_400_BAD_REQUEST,
+        )
```
cognee/tasks/storage/index_data_points.py

```diff
@@ -1,6 +1,6 @@
 from cognee.shared.logging_utils import get_logger
 
-from cognee.infrastructure.databases.exceptions
+from cognee.infrastructure.databases.exceptions import EmbeddingException
 from cognee.infrastructure.databases.vector import get_vector_engine
 from cognee.infrastructure.engine import DataPoint
 
```
cognee/tasks/summarization/exceptions/exceptions.py (new)

```diff
@@ -0,0 +1,14 @@
+from cognee.exceptions import (
+    CogneeValidationError,
+    CogneeConfigurationError,
+)
+from fastapi import status
+
+
+class InvalidSummaryInputsError(CogneeValidationError):
+    def __init__(self, detail: str):
+        super().__init__(
+            message=f"Invalid summarize_text inputs: {detail}",
+            name="InvalidSummaryInputsError",
+            status_code=status.HTTP_400_BAD_REQUEST,
+        )
```
cognee/tasks/summarization/summarize_text.py

```diff
@@ -3,10 +3,11 @@ from typing import Type
 from uuid import uuid5
 from pydantic import BaseModel
 
+from cognee.tasks.summarization.exceptions import InvalidSummaryInputsError
 from cognee.modules.chunking.models.DocumentChunk import DocumentChunk
 from cognee.infrastructure.llm.LLMGateway import LLMGateway
 from cognee.modules.cognify.config import get_cognify_config
-from .models import TextSummary
+from cognee.tasks.summarization.models import TextSummary
 
 
 async def summarize_text(
@@ -35,6 +36,12 @@ async def summarize_text(
         A list of TextSummary objects, each containing the summary of a corresponding
         DocumentChunk.
     """
+
+    if not isinstance(data_chunks, list):
+        raise InvalidSummaryInputsError("data_chunks must be a list.")
+    if not all(hasattr(c, "text") for c in data_chunks):
+        raise InvalidSummaryInputsError("each DocumentChunk must have a 'text' attribute.")
+
     if len(data_chunks) == 0:
         return data_chunks
 
```
cognee/tests/test_delete_by_id.py

```diff
@@ -7,7 +7,7 @@ from cognee.shared.logging_utils import get_logger
 from cognee.modules.users.methods import get_default_user, create_user
 from cognee.modules.users.permissions.methods import authorized_give_permission_on_datasets
 from cognee.modules.data.methods import get_dataset_data, get_datasets_by_name
-from cognee.api.v1.
+from cognee.api.v1.exceptions import DocumentNotFoundError, DatasetNotFoundError
 
 logger = get_logger()
 
```
cognee/tests/unit/modules/graph/cognee_graph_elements_test.py

```diff
@@ -1,8 +1,8 @@
 import numpy as np
 import pytest
 
-from cognee.exceptions import InvalidValueError
 from cognee.modules.graph.cognee_graph.CogneeGraphElements import Edge, Node
+from cognee.modules.graph.exceptions import InvalidDimensionsError, DimensionOutOfRangeError
 
 
 def test_node_initialization():
@@ -16,7 +16,7 @@ def test_node_initialization():
 
 def test_node_invalid_dimension():
     """Test that initializing a Node with a non-positive dimension raises an error."""
-    with pytest.raises(
+    with pytest.raises(InvalidDimensionsError):
         Node("node1", dimension=0)
 
 
@@ -69,7 +69,7 @@ def test_is_node_alive_in_dimension():
 def test_node_alive_invalid_dimension():
     """Test that checking alive status with an invalid dimension raises an error."""
     node = Node("node1", dimension=1)
-    with pytest.raises(
+    with pytest.raises(DimensionOutOfRangeError):
         node.is_node_alive_in_dimension(1)
 
 
@@ -106,7 +106,7 @@ def test_edge_invalid_dimension():
     """Test that initializing an Edge with a non-positive dimension raises an error."""
     node1 = Node("node1")
     node2 = Node("node2")
-    with pytest.raises(
+    with pytest.raises(InvalidDimensionsError):
         Edge(node1, node2, dimension=0)
 
 
@@ -125,7 +125,7 @@ def test_edge_alive_invalid_dimension():
     node1 = Node("node1")
     node2 = Node("node2")
     edge = Edge(node1, node2, dimension=1)
-    with pytest.raises(
+    with pytest.raises(DimensionOutOfRangeError):
         edge.is_edge_alive_in_dimension(1)
 
 
```
cognee/tests/unit/modules/search/search_methods_test.py

```diff
@@ -5,7 +5,7 @@ from unittest.mock import AsyncMock, MagicMock, patch
 import pytest
 from pylint.checkers.utils import node_type
 
-from cognee.exceptions import
+from cognee.modules.search.exceptions import UnsupportedSearchTypeError
 from cognee.modules.search.methods.search import search, specific_search
 from cognee.modules.search.types import SearchType
 from cognee.modules.users.models import User
@@ -217,7 +217,7 @@ async def test_specific_search_invalid_type(mock_user):
     query_type = "INVALID_TYPE"  # Not a valid SearchType
 
     # Execute and verify
-    with pytest.raises(
+    with pytest.raises(UnsupportedSearchTypeError) as excinfo:
         await specific_search(query_type, query, mock_user)
 
     assert "Unsupported search type" in str(excinfo.value)
```
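
The rewritten assertion works because `pytest.raises(...) as excinfo` binds an `ExceptionInfo` object whose `.value` is the raised exception instance. A self-contained illustration of the pattern:

```python
import pytest


def test_message_is_captured():
    # excinfo.value is the exception instance, so its text can be
    # asserted after the with-block exits.
    with pytest.raises(ValueError) as excinfo:
        raise ValueError("Unsupported search type: INVALID_TYPE")
    assert "Unsupported search type" in str(excinfo.value)
```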
{cognee-0.2.3.dev0.dist-info → cognee-0.2.3.dev1.dist-info}/METADATA

```diff
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: cognee
-Version: 0.2.3.dev0
+Version: 0.2.3.dev1
 Summary: Cognee - is a library for enriching LLM context with a semantic layer for better understanding and reasoning.
 Project-URL: Homepage, https://www.cognee.ai
 Project-URL: Repository, https://github.com/topoteretes/cognee
```
|