cognee 0.2.3.dev0__py3-none-any.whl → 0.2.4__py3-none-any.whl
This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their respective public registries.
- cognee/__main__.py +4 -0
- cognee/api/v1/add/add.py +18 -11
- cognee/api/v1/cognify/code_graph_pipeline.py +7 -1
- cognee/api/v1/cognify/cognify.py +22 -115
- cognee/api/v1/cognify/routers/get_cognify_router.py +11 -3
- cognee/api/v1/config/config.py +5 -13
- cognee/api/v1/datasets/routers/get_datasets_router.py +2 -2
- cognee/api/v1/delete/delete.py +1 -1
- cognee/api/v1/exceptions/__init__.py +13 -0
- cognee/api/v1/{delete → exceptions}/exceptions.py +15 -12
- cognee/api/v1/responses/default_tools.py +4 -0
- cognee/api/v1/responses/dispatch_function.py +6 -1
- cognee/api/v1/responses/models.py +1 -1
- cognee/api/v1/search/search.py +6 -7
- cognee/cli/__init__.py +10 -0
- cognee/cli/_cognee.py +180 -0
- cognee/cli/commands/__init__.py +1 -0
- cognee/cli/commands/add_command.py +80 -0
- cognee/cli/commands/cognify_command.py +128 -0
- cognee/cli/commands/config_command.py +225 -0
- cognee/cli/commands/delete_command.py +80 -0
- cognee/cli/commands/search_command.py +149 -0
- cognee/cli/config.py +33 -0
- cognee/cli/debug.py +21 -0
- cognee/cli/echo.py +45 -0
- cognee/cli/exceptions.py +23 -0
- cognee/cli/minimal_cli.py +97 -0
- cognee/cli/reference.py +26 -0
- cognee/cli/suppress_logging.py +12 -0
- cognee/eval_framework/corpus_builder/corpus_builder_executor.py +2 -2
- cognee/eval_framework/eval_config.py +1 -1
- cognee/exceptions/__init__.py +5 -5
- cognee/exceptions/exceptions.py +37 -17
- cognee/infrastructure/data/exceptions/__init__.py +7 -0
- cognee/infrastructure/data/exceptions/exceptions.py +22 -0
- cognee/infrastructure/data/utils/extract_keywords.py +3 -3
- cognee/infrastructure/databases/exceptions/__init__.py +3 -0
- cognee/infrastructure/databases/exceptions/exceptions.py +57 -9
- cognee/infrastructure/databases/graph/get_graph_engine.py +4 -9
- cognee/infrastructure/databases/graph/kuzu/adapter.py +64 -2
- cognee/infrastructure/databases/graph/neo4j_driver/adapter.py +49 -0
- cognee/infrastructure/databases/graph/neptune_driver/exceptions.py +15 -10
- cognee/infrastructure/databases/hybrid/falkordb/FalkorDBAdapter.py +2 -2
- cognee/infrastructure/databases/hybrid/neptune_analytics/NeptuneAnalyticsAdapter.py +4 -5
- cognee/infrastructure/databases/vector/chromadb/ChromaDBAdapter.py +2 -2
- cognee/infrastructure/databases/vector/embeddings/FastembedEmbeddingEngine.py +5 -3
- cognee/infrastructure/databases/vector/embeddings/LiteLLMEmbeddingEngine.py +17 -8
- cognee/infrastructure/databases/vector/embeddings/OllamaEmbeddingEngine.py +5 -5
- cognee/infrastructure/databases/vector/embeddings/config.py +2 -2
- cognee/infrastructure/databases/vector/embeddings/get_embedding_engine.py +6 -6
- cognee/infrastructure/databases/vector/exceptions/exceptions.py +3 -3
- cognee/infrastructure/databases/vector/lancedb/LanceDBAdapter.py +2 -2
- cognee/infrastructure/databases/vector/pgvector/PGVectorAdapter.py +4 -3
- cognee/infrastructure/files/utils/get_data_file_path.py +14 -9
- cognee/infrastructure/files/utils/get_file_metadata.py +2 -1
- cognee/infrastructure/llm/LLMGateway.py +14 -5
- cognee/infrastructure/llm/config.py +5 -5
- cognee/infrastructure/llm/exceptions.py +30 -2
- cognee/infrastructure/llm/structured_output_framework/baml/baml_src/extraction/knowledge_graph/extract_content_graph.py +16 -5
- cognee/infrastructure/llm/structured_output_framework/litellm_instructor/extraction/knowledge_graph/extract_content_graph.py +19 -15
- cognee/infrastructure/llm/structured_output_framework/litellm_instructor/llm/anthropic/adapter.py +5 -5
- cognee/infrastructure/llm/structured_output_framework/litellm_instructor/llm/gemini/adapter.py +6 -6
- cognee/infrastructure/llm/structured_output_framework/litellm_instructor/llm/generic_llm_api/adapter.py +2 -2
- cognee/infrastructure/llm/structured_output_framework/litellm_instructor/llm/get_llm_client.py +24 -15
- cognee/infrastructure/llm/structured_output_framework/litellm_instructor/llm/ollama/adapter.py +6 -4
- cognee/infrastructure/llm/structured_output_framework/litellm_instructor/llm/openai/adapter.py +9 -7
- cognee/infrastructure/llm/tokenizer/Gemini/adapter.py +2 -2
- cognee/infrastructure/llm/tokenizer/HuggingFace/adapter.py +3 -3
- cognee/infrastructure/llm/tokenizer/Mistral/adapter.py +3 -3
- cognee/infrastructure/llm/tokenizer/TikToken/adapter.py +6 -6
- cognee/infrastructure/llm/utils.py +7 -7
- cognee/modules/data/exceptions/exceptions.py +18 -5
- cognee/modules/data/methods/__init__.py +2 -0
- cognee/modules/data/methods/create_authorized_dataset.py +19 -0
- cognee/modules/data/methods/delete_data.py +2 -4
- cognee/modules/data/methods/get_authorized_dataset.py +11 -5
- cognee/modules/data/methods/get_authorized_dataset_by_name.py +16 -0
- cognee/modules/data/methods/load_or_create_datasets.py +2 -20
- cognee/modules/data/processing/document_types/exceptions/exceptions.py +2 -2
- cognee/modules/graph/cognee_graph/CogneeGraph.py +6 -4
- cognee/modules/graph/cognee_graph/CogneeGraphElements.py +5 -10
- cognee/modules/graph/exceptions/__init__.py +2 -0
- cognee/modules/graph/exceptions/exceptions.py +25 -3
- cognee/modules/graph/methods/get_formatted_graph_data.py +3 -2
- cognee/modules/ingestion/exceptions/exceptions.py +2 -2
- cognee/modules/ontology/exceptions/exceptions.py +4 -4
- cognee/modules/pipelines/__init__.py +1 -1
- cognee/modules/pipelines/exceptions/exceptions.py +2 -2
- cognee/modules/pipelines/exceptions/tasks.py +18 -0
- cognee/modules/pipelines/layers/__init__.py +1 -0
- cognee/modules/pipelines/layers/check_pipeline_run_qualification.py +59 -0
- cognee/modules/pipelines/layers/pipeline_execution_mode.py +127 -0
- cognee/modules/pipelines/layers/reset_dataset_pipeline_run_status.py +12 -0
- cognee/modules/pipelines/layers/resolve_authorized_user_dataset.py +34 -0
- cognee/modules/pipelines/layers/resolve_authorized_user_datasets.py +55 -0
- cognee/modules/pipelines/layers/setup_and_check_environment.py +41 -0
- cognee/modules/pipelines/layers/validate_pipeline_tasks.py +20 -0
- cognee/modules/pipelines/methods/__init__.py +2 -0
- cognee/modules/pipelines/methods/get_pipeline_runs_by_dataset.py +34 -0
- cognee/modules/pipelines/methods/reset_pipeline_run_status.py +16 -0
- cognee/modules/pipelines/operations/__init__.py +0 -1
- cognee/modules/pipelines/operations/log_pipeline_run_initiated.py +1 -1
- cognee/modules/pipelines/operations/pipeline.py +23 -138
- cognee/modules/retrieval/base_feedback.py +11 -0
- cognee/modules/retrieval/cypher_search_retriever.py +1 -9
- cognee/modules/retrieval/exceptions/exceptions.py +12 -6
- cognee/modules/retrieval/graph_completion_context_extension_retriever.py +9 -2
- cognee/modules/retrieval/graph_completion_cot_retriever.py +13 -6
- cognee/modules/retrieval/graph_completion_retriever.py +89 -5
- cognee/modules/retrieval/graph_summary_completion_retriever.py +2 -0
- cognee/modules/retrieval/natural_language_retriever.py +0 -4
- cognee/modules/retrieval/user_qa_feedback.py +83 -0
- cognee/modules/retrieval/utils/extract_uuid_from_node.py +18 -0
- cognee/modules/retrieval/utils/models.py +40 -0
- cognee/modules/search/exceptions/__init__.py +7 -0
- cognee/modules/search/exceptions/exceptions.py +15 -0
- cognee/modules/search/methods/search.py +47 -7
- cognee/modules/search/types/SearchType.py +1 -0
- cognee/modules/settings/get_settings.py +2 -2
- cognee/modules/users/exceptions/exceptions.py +6 -6
- cognee/shared/CodeGraphEntities.py +1 -0
- cognee/shared/exceptions/exceptions.py +2 -2
- cognee/shared/logging_utils.py +142 -31
- cognee/shared/utils.py +0 -1
- cognee/tasks/completion/exceptions/exceptions.py +3 -3
- cognee/tasks/documents/classify_documents.py +4 -0
- cognee/tasks/documents/exceptions/__init__.py +11 -0
- cognee/tasks/documents/exceptions/exceptions.py +36 -0
- cognee/tasks/documents/extract_chunks_from_documents.py +8 -2
- cognee/tasks/graph/exceptions/__init__.py +12 -0
- cognee/tasks/graph/exceptions/exceptions.py +41 -0
- cognee/tasks/graph/extract_graph_from_data.py +34 -2
- cognee/tasks/ingestion/exceptions/__init__.py +8 -0
- cognee/tasks/ingestion/exceptions/exceptions.py +12 -0
- cognee/tasks/ingestion/resolve_data_directories.py +5 -0
- cognee/tasks/repo_processor/get_local_dependencies.py +2 -0
- cognee/tasks/repo_processor/get_repo_file_dependencies.py +120 -48
- cognee/tasks/storage/add_data_points.py +41 -3
- cognee/tasks/storage/exceptions/__init__.py +9 -0
- cognee/tasks/storage/exceptions/exceptions.py +13 -0
- cognee/tasks/storage/index_data_points.py +1 -1
- cognee/tasks/summarization/exceptions/__init__.py +9 -0
- cognee/tasks/summarization/exceptions/exceptions.py +14 -0
- cognee/tasks/summarization/summarize_text.py +8 -1
- cognee/tests/integration/cli/__init__.py +3 -0
- cognee/tests/integration/cli/test_cli_integration.py +331 -0
- cognee/tests/integration/documents/PdfDocument_test.py +2 -2
- cognee/tests/integration/documents/TextDocument_test.py +2 -4
- cognee/tests/integration/documents/UnstructuredDocument_test.py +5 -8
- cognee/tests/test_delete_by_id.py +1 -1
- cognee/tests/{test_deletion.py → test_delete_hard.py} +0 -37
- cognee/tests/test_delete_soft.py +85 -0
- cognee/tests/test_kuzu.py +2 -2
- cognee/tests/test_neo4j.py +2 -2
- cognee/tests/test_search_db.py +126 -7
- cognee/tests/unit/cli/__init__.py +3 -0
- cognee/tests/unit/cli/test_cli_commands.py +483 -0
- cognee/tests/unit/cli/test_cli_edge_cases.py +625 -0
- cognee/tests/unit/cli/test_cli_main.py +173 -0
- cognee/tests/unit/cli/test_cli_runner.py +62 -0
- cognee/tests/unit/cli/test_cli_utils.py +127 -0
- cognee/tests/unit/modules/graph/cognee_graph_elements_test.py +5 -5
- cognee/tests/unit/modules/retrieval/graph_completion_retriever_context_extension_test.py +3 -3
- cognee/tests/unit/modules/retrieval/graph_completion_retriever_cot_test.py +3 -3
- cognee/tests/unit/modules/retrieval/graph_completion_retriever_test.py +3 -3
- cognee/tests/unit/modules/search/search_methods_test.py +4 -2
- {cognee-0.2.3.dev0.dist-info → cognee-0.2.4.dist-info}/METADATA +7 -5
- {cognee-0.2.3.dev0.dist-info → cognee-0.2.4.dist-info}/RECORD +172 -121
- cognee-0.2.4.dist-info/entry_points.txt +2 -0
- cognee/infrastructure/databases/exceptions/EmbeddingException.py +0 -20
- cognee/infrastructure/databases/graph/networkx/__init__.py +0 -0
- cognee/infrastructure/databases/graph/networkx/adapter.py +0 -1017
- cognee/infrastructure/pipeline/models/Operation.py +0 -60
- cognee/infrastructure/pipeline/models/__init__.py +0 -0
- cognee/notebooks/github_analysis_step_by_step.ipynb +0 -37
- cognee/tests/tasks/descriptive_metrics/networkx_metrics_test.py +0 -7
- {cognee-0.2.3.dev0.dist-info → cognee-0.2.4.dist-info}/WHEEL +0 -0
- {cognee-0.2.3.dev0.dist-info → cognee-0.2.4.dist-info}/licenses/LICENSE +0 -0
- {cognee-0.2.3.dev0.dist-info → cognee-0.2.4.dist-info}/licenses/NOTICE.md +0 -0
cognee/infrastructure/databases/exceptions/exceptions.py

@@ -1,13 +1,13 @@
 from fastapi import status
-from cognee.exceptions import
+from cognee.exceptions import CogneeSystemError, CogneeValidationError, CogneeConfigurationError
 
 
-class DatabaseNotCreatedError(
+class DatabaseNotCreatedError(CogneeSystemError):
     """
     Represents an error indicating that the database has not been created. This error should
     be raised when an attempt is made to access the database before it has been initialized.
 
-    Inherits from
+    Inherits from CogneeSystemError. Overrides the constructor to include a default message and
     status code.
     """
 
@@ -20,10 +20,10 @@ class DatabaseNotCreatedError(CriticalError):
         super().__init__(message, name, status_code)
 
 
-class EntityNotFoundError(
+class EntityNotFoundError(CogneeValidationError):
     """
     Represents an error when a requested entity is not found in the database. This class
-    inherits from
+    inherits from CogneeValidationError.
 
     Public methods:
 
@@ -49,11 +49,11 @@ class EntityNotFoundError(CogneeApiError):
         # super().__init__(message, name, status_code) :TODO: This is not an error anymore with the dynamic exception handling therefore we shouldn't log error
 
 
-class EntityAlreadyExistsError(
+class EntityAlreadyExistsError(CogneeValidationError):
     """
     Represents an error when an entity creation is attempted but the entity already exists.
 
-    This class is derived from
+    This class is derived from CogneeValidationError and is used to signal a conflict in operations
     involving resource creation.
     """
 
@@ -66,11 +66,11 @@ class EntityAlreadyExistsError(CogneeApiError):
         super().__init__(message, name, status_code)
 
 
-class NodesetFilterNotSupportedError(
+class NodesetFilterNotSupportedError(CogneeConfigurationError):
     """
     Raise an exception when a nodeset filter is not supported by the current database.
 
-    This exception inherits from `
+    This exception inherits from `CogneeConfigurationError` and is designed to provide information
     about the specific issue of unsupported nodeset filters in the context of graph
     databases.
     """
@@ -84,3 +84,51 @@ class NodesetFilterNotSupportedError(CogneeApiError):
         self.message = message
         self.name = name
         self.status_code = status_code
+
+
+class EmbeddingException(CogneeConfigurationError):
+    """
+    Custom exception for handling embedding-related errors.
+
+    This exception class is designed to indicate issues specifically related to embeddings
+    within the application. It extends the base exception class CogneeConfigurationError allows
+    for customization of the error message, name, and status code.
+    """
+
+    def __init__(
+        self,
+        message: str = "Embedding Exception.",
+        name: str = "EmbeddingException",
+        status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
+    ):
+        super().__init__(message, name, status_code)
+
+
+class MissingQueryParameterError(CogneeValidationError):
+    """
+    Raised when neither 'query_text' nor 'query_vector' is provided,
+    and at least one is required to perform the operation.
+    """
+
+    def __init__(
+        self,
+        name: str = "MissingQueryParameterError",
+        status_code: int = status.HTTP_400_BAD_REQUEST,
+    ):
+        message = "One of query_text or query_vector must be provided!"
+        super().__init__(message, name, status_code)
+
+
+class MutuallyExclusiveQueryParametersError(CogneeValidationError):
+    """
+    Raised when both 'text' and 'embedding' are provided to the search function,
+    but only one type of input is allowed at a time.
+    """
+
+    def __init__(
+        self,
+        name: str = "MutuallyExclusiveQueryParametersError",
+        status_code: int = status.HTTP_400_BAD_REQUEST,
+    ):
+        message = "The search function accepts either text or embedding as input, but not both."
+        super().__init__(message, name, status_code)
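The base classes referenced above come from `cognee.exceptions` (per the new import line), so callers can branch on an error category rather than on individual classes. A minimal sketch, assuming those imports work exactly as the diff shows; the `build_search_request` helper is hypothetical and only illustrates the guard pattern the adapters below switch to:

```python
# Hedged sketch: MissingQueryParameterError subclasses CogneeValidationError per the diff,
# so category-level handling catches it along with the other validation errors.
from cognee.exceptions import CogneeValidationError
from cognee.infrastructure.databases.exceptions import MissingQueryParameterError


def build_search_request(query_text=None, query_vector=None) -> dict:
    # Same precondition the vector/graph adapters enforce in this release.
    if query_text is None and query_vector is None:
        raise MissingQueryParameterError()
    return {"text": query_text, "vector": query_vector}


try:
    build_search_request()
except CogneeValidationError as err:
    # Also covers MutuallyExclusiveQueryParametersError, EntityNotFoundError, etc.
    print(f"Invalid request: {err}")
```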
cognee/infrastructure/databases/graph/get_graph_engine.py

@@ -21,10 +21,6 @@ async def get_graph_engine() -> GraphDBInterface:
     if hasattr(graph_client, "initialize"):
         await graph_client.initialize()
 
-    # Handle loading of graph for NetworkX
-    if config["graph_database_provider"].lower() == "networkx" and graph_client.graph is None:
-        await graph_client.load_graph_from_file()
-
     return graph_client
 
 
@@ -181,8 +177,7 @@ def create_graph_engine(
         graph_id=graph_identifier,
     )
 
-
-
-
-
-    return graph_client
+    raise EnvironmentError(
+        f"Unsupported graph database provider: {graph_database_provider}. "
+        f"Supported providers are: {', '.join(list(supported_databases.keys()) + ['neo4j', 'falkordb', 'kuzu', 'kuzu-remote', 'memgraph', 'neptune', 'neptune_analytics'])}"
+    )
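With the NetworkX loading path removed, an unsupported `graph_database_provider` now presumably surfaces as an `EnvironmentError` when the engine is built instead of falling through silently. A minimal caller-side sketch under that assumption; everything apart from the import and the exception type is illustrative:

```python
# Hedged sketch of handling the new failure mode at the call site.
import asyncio

from cognee.infrastructure.databases.graph.get_graph_engine import get_graph_engine


async def main() -> None:
    try:
        graph_client = await get_graph_engine()
    except EnvironmentError as err:
        # Raised for providers outside the supported set (neo4j, falkordb, kuzu,
        # kuzu-remote, memgraph, neptune, neptune_analytics, or a registered plugin).
        print(f"Graph engine not available: {err}")
        return
    print(f"Graph engine ready: {type(graph_client).__name__}")


if __name__ == "__main__":
    asyncio.run(main())
```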
cognee/infrastructure/databases/graph/kuzu/adapter.py

@@ -138,8 +138,9 @@ class KuzuAdapter(GraphDBInterface):
 
         s3_file_storage = S3FileStorage("")
 
-
-        self.
+        if self.connection:
+            async with self.KUZU_ASYNC_LOCK:
+                self.connection.execute("CHECKPOINT;")
 
         s3_file_storage.s3.put(self.temp_graph_file, self.db_path, recursive=True)
 
@@ -1631,3 +1632,64 @@ class KuzuAdapter(GraphDBInterface):
         """
         result = await self.query(query)
         return [record[0] for record in result] if result else []
+
+    async def get_last_user_interaction_ids(self, limit: int) -> List[str]:
+        """
+        Retrieve the IDs of the most recent CogneeUserInteraction nodes.
+        Parameters:
+        -----------
+        - limit (int): The maximum number of interaction IDs to return.
+        Returns:
+        --------
+        - List[str]: A list of interaction IDs, sorted by created_at descending.
+        """
+
+        query = """
+            MATCH (n)
+            WHERE n.type = 'CogneeUserInteraction'
+            RETURN n.id as id
+            ORDER BY n.created_at DESC
+            LIMIT $limit
+        """
+        rows = await self.query(query, {"limit": limit})
+
+        id_list = [row[0] for row in rows]
+        return id_list
+
+    async def apply_feedback_weight(
+        self,
+        node_ids: List[str],
+        weight: float,
+    ) -> None:
+        """
+        Increment `feedback_weight` inside r.properties JSON for edges where
+        relationship_name = 'used_graph_element_to_answer'.
+
+        """
+        # Step 1: fetch matching edges
+        query = """
+            MATCH (n:Node)-[r:EDGE]->()
+            WHERE n.id IN $node_ids AND r.relationship_name = 'used_graph_element_to_answer'
+            RETURN r.properties, n.id
+        """
+        results = await self.query(query, {"node_ids": node_ids})
+
+        # Step 2: update JSON client-side
+        updates = []
+        for props_json, source_id in results:
+            try:
+                props = json.loads(props_json) if props_json else {}
+            except json.JSONDecodeError:
+                props = {}
+
+            props["feedback_weight"] = props.get("feedback_weight", 0) + weight
+            updates.append((source_id, json.dumps(props)))
+
+        # Step 3: write back
+        for node_id, new_props in updates:
+            update_query = """
+                MATCH (n:Node)-[r:EDGE]->()
+                WHERE n.id = $node_id AND r.relationship_name = 'used_graph_element_to_answer'
+                SET r.properties = $props
+            """
+            await self.query(update_query, {"node_id": node_id, "props": new_props})
cognee/infrastructure/databases/graph/neo4j_driver/adapter.py

@@ -1322,3 +1322,52 @@ class Neo4jAdapter(GraphDBInterface):
         """
         result = await self.query(query)
         return [record["n"] for record in result] if result else []
+
+    async def get_last_user_interaction_ids(self, limit: int) -> List[str]:
+        """
+        Retrieve the IDs of the most recent CogneeUserInteraction nodes.
+        Parameters:
+        -----------
+        - limit (int): The maximum number of interaction IDs to return.
+        Returns:
+        --------
+        - List[str]: A list of interaction IDs, sorted by created_at descending.
+        """
+
+        query = """
+            MATCH (n)
+            WHERE n.type = 'CogneeUserInteraction'
+            RETURN n.id as id
+            ORDER BY n.created_at DESC
+            LIMIT $limit
+        """
+        rows = await self.query(query, {"limit": limit})
+
+        id_list = [row["id"] for row in rows if "id" in row]
+        return id_list
+
+    async def apply_feedback_weight(
+        self,
+        node_ids: List[str],
+        weight: float,
+    ) -> None:
+        """
+        Increment `feedback_weight` on relationships `:used_graph_element_to_answer`
+        outgoing from nodes whose `id` is in `node_ids`.
+
+        Args:
+            node_ids: List of node IDs to match.
+            weight: Amount to add to `r.feedback_weight` (can be negative).
+
+        Side effects:
+            Updates relationship property `feedback_weight`, defaulting missing values to 0.
+        """
+        query = """
+            MATCH (n)-[r]->()
+            WHERE n.id IN $node_ids AND r.relationship_name = 'used_graph_element_to_answer'
+            SET r.feedback_weight = coalesce(r.feedback_weight, 0) + $weight
+        """
+        await self.query(
+            query,
+            params={"weight": float(weight), "node_ids": list(node_ids)},
+        )
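Both the Kuzu and Neo4j adapters now expose the same two feedback hooks, so callers can stay backend-agnostic. A minimal usage sketch, assuming the adapter instance comes from `get_graph_engine()`, that the configured backend is one of these two, and that some `CogneeUserInteraction` nodes already exist; the weight value is illustrative:

```python
# Illustrative use of the feedback API added to KuzuAdapter and Neo4jAdapter.
import asyncio

from cognee.infrastructure.databases.graph.get_graph_engine import get_graph_engine


async def reward_recent_interactions(weight: float = 1.0, limit: int = 5) -> None:
    graph_engine = await get_graph_engine()

    # IDs of the most recent CogneeUserInteraction nodes, newest first.
    interaction_ids = await graph_engine.get_last_user_interaction_ids(limit)

    # Bump feedback_weight on their outgoing 'used_graph_element_to_answer' edges;
    # a negative weight would penalize them instead.
    if interaction_ids:
        await graph_engine.apply_feedback_weight(interaction_ids, weight)


if __name__ == "__main__":
    asyncio.run(reward_recent_interactions())
```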
cognee/infrastructure/databases/graph/neptune_driver/exceptions.py

@@ -3,11 +3,16 @@
 This module defines custom exceptions for Neptune Analytics operations.
 """
 
-from cognee.exceptions import
+from cognee.exceptions import (
+    CogneeSystemError,
+    CogneeTransientError,
+    CogneeValidationError,
+    CogneeConfigurationError,
+)
 from fastapi import status
 
 
-class NeptuneAnalyticsError(
+class NeptuneAnalyticsError(CogneeSystemError):
     """Base exception for Neptune Analytics operations."""
 
     def __init__(
@@ -19,7 +24,7 @@ class NeptuneAnalyticsError(CogneeApiError):
         super().__init__(message, name, status_code)
 
 
-class NeptuneAnalyticsConnectionError(
+class NeptuneAnalyticsConnectionError(CogneeTransientError):
     """Exception raised when connection to Neptune Analytics fails."""
 
     def __init__(
@@ -31,7 +36,7 @@ class NeptuneAnalyticsConnectionError(NeptuneAnalyticsError):
         super().__init__(message, name, status_code)
 
 
-class NeptuneAnalyticsQueryError(
+class NeptuneAnalyticsQueryError(CogneeValidationError):
     """Exception raised when a query execution fails."""
 
     def __init__(
@@ -43,7 +48,7 @@ class NeptuneAnalyticsQueryError(NeptuneAnalyticsError):
         super().__init__(message, name, status_code)
 
 
-class NeptuneAnalyticsAuthenticationError(
+class NeptuneAnalyticsAuthenticationError(CogneeConfigurationError):
     """Exception raised when authentication with Neptune Analytics fails."""
 
     def __init__(
@@ -55,7 +60,7 @@ class NeptuneAnalyticsAuthenticationError(NeptuneAnalyticsError):
         super().__init__(message, name, status_code)
 
 
-class NeptuneAnalyticsConfigurationError(
+class NeptuneAnalyticsConfigurationError(CogneeConfigurationError):
     """Exception raised when Neptune Analytics configuration is invalid."""
 
     def __init__(
@@ -67,7 +72,7 @@ class NeptuneAnalyticsConfigurationError(NeptuneAnalyticsError):
         super().__init__(message, name, status_code)
 
 
-class NeptuneAnalyticsTimeoutError(
+class NeptuneAnalyticsTimeoutError(CogneeTransientError):
     """Exception raised when a Neptune Analytics operation times out."""
 
     def __init__(
@@ -79,7 +84,7 @@ class NeptuneAnalyticsTimeoutError(NeptuneAnalyticsError):
         super().__init__(message, name, status_code)
 
 
-class NeptuneAnalyticsThrottlingError(
+class NeptuneAnalyticsThrottlingError(CogneeTransientError):
     """Exception raised when requests are throttled by Neptune Analytics."""
 
     def __init__(
@@ -91,7 +96,7 @@ class NeptuneAnalyticsThrottlingError(NeptuneAnalyticsError):
         super().__init__(message, name, status_code)
 
 
-class NeptuneAnalyticsResourceNotFoundError(
+class NeptuneAnalyticsResourceNotFoundError(CogneeValidationError):
     """Exception raised when a Neptune Analytics resource is not found."""
 
     def __init__(
@@ -103,7 +108,7 @@ class NeptuneAnalyticsResourceNotFoundError(NeptuneAnalyticsError):
         super().__init__(message, name, status_code)
 
 
-class NeptuneAnalyticsInvalidParameterError(
+class NeptuneAnalyticsInvalidParameterError(CogneeValidationError):
     """Exception raised when invalid parameters are provided to Neptune Analytics."""
 
     def __init__(
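Re-parenting the connection, timeout, and throttling errors onto `CogneeTransientError` makes them distinguishable from permanent failures. A minimal retry sketch built on that category alone; `run_query` is a caller-supplied stand-in, not part of the diff:

```python
# Hedged sketch: retry only what the new hierarchy marks as transient
# (NeptuneAnalyticsConnectionError, NeptuneAnalyticsTimeoutError,
# NeptuneAnalyticsThrottlingError); everything else propagates immediately.
import asyncio

from cognee.exceptions import CogneeTransientError


async def with_retries(run_query, attempts: int = 3, backoff_seconds: float = 1.0):
    for attempt in range(1, attempts + 1):
        try:
            return await run_query()
        except CogneeTransientError:
            if attempt == attempts:
                raise
            # Simple linear backoff between attempts.
            await asyncio.sleep(backoff_seconds * attempt)
```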
cognee/infrastructure/databases/hybrid/falkordb/FalkorDBAdapter.py

@@ -9,7 +9,7 @@ from typing import List, Dict, Any, Optional, Tuple, Type, Union
 
 from falkordb import FalkorDB
 
-from cognee.exceptions import
+from cognee.infrastructure.databases.exceptions import MissingQueryParameterError
 from cognee.infrastructure.databases.graph.graph_db_interface import (
     GraphDBInterface,
     record_graph_changes,
@@ -721,7 +721,7 @@ class FalkorDBAdapter(VectorDBInterface, GraphDBInterface):
         Returns the search results as a result set from the graph database.
         """
         if query_text is None and query_vector is None:
-            raise
+            raise MissingQueryParameterError()
 
         if query_text and not query_vector:
             query_vector = (await self.embed_data([query_text]))[0]
cognee/infrastructure/databases/hybrid/neptune_analytics/NeptuneAnalyticsAdapter.py

@@ -5,7 +5,8 @@ import json
 from typing import List, Optional, Any, Dict, Type, Tuple
 from uuid import UUID
 
-from cognee.exceptions import
+from cognee.infrastructure.databases.exceptions import MissingQueryParameterError
+from cognee.infrastructure.databases.exceptions import MutuallyExclusiveQueryParametersError
 from cognee.infrastructure.databases.graph.neptune_driver.adapter import NeptuneGraphDB
 from cognee.infrastructure.databases.vector.vector_db_interface import VectorDBInterface
 from cognee.infrastructure.engine import DataPoint
@@ -274,11 +275,9 @@ class NeptuneAnalyticsAdapter(NeptuneGraphDB, VectorDBInterface):
             limit = self._TOPK_UPPER_BOUND
 
         if query_vector and query_text:
-            raise
-                message="The search function accepts either text or embedding as input, but not both."
-            )
+            raise MutuallyExclusiveQueryParametersError()
         elif query_text is None and query_vector is None:
-            raise
+            raise MissingQueryParameterError()
         elif query_vector:
             embedding = query_vector
         else:
cognee/infrastructure/databases/vector/chromadb/ChromaDBAdapter.py

@@ -4,13 +4,13 @@ from uuid import UUID
 from typing import List, Optional
 from chromadb import AsyncHttpClient, Settings
 
-from cognee.exceptions import InvalidValueError
 from cognee.shared.logging_utils import get_logger
 from cognee.modules.storage.utils import get_own_properties
 from cognee.infrastructure.engine import DataPoint
 from cognee.infrastructure.engine.utils import parse_id
 from cognee.infrastructure.databases.vector.exceptions import CollectionNotFoundError
 from cognee.infrastructure.databases.vector.models.ScoredResult import ScoredResult
+from cognee.infrastructure.databases.exceptions import MissingQueryParameterError
 
 from ..embeddings.EmbeddingEngine import EmbeddingEngine
 from ..vector_db_interface import VectorDBInterface
@@ -378,7 +378,7 @@ class ChromaDBAdapter(VectorDBInterface):
         Returns a list of ScoredResult instances representing the search results.
         """
         if query_text is None and query_vector is None:
-            raise
+            raise MissingQueryParameterError()
 
         if query_text and not query_vector:
             query_vector = (await self.embedding_engine.embed_text([query_text]))[0]
cognee/infrastructure/databases/vector/embeddings/FastembedEmbeddingEngine.py

@@ -41,11 +41,11 @@ class FastembedEmbeddingEngine(EmbeddingEngine):
         self,
         model: Optional[str] = "openai/text-embedding-3-large",
         dimensions: Optional[int] = 3072,
-
+        max_completion_tokens: int = 512,
     ):
         self.model = model
         self.dimensions = dimensions
-        self.
+        self.max_completion_tokens = max_completion_tokens
         self.tokenizer = self.get_tokenizer()
         # self.retry_count = 0
         self.embedding_model = TextEmbedding(model_name=model)
@@ -112,7 +112,9 @@ class FastembedEmbeddingEngine(EmbeddingEngine):
         """
         logger.debug("Loading tokenizer for FastembedEmbeddingEngine...")
 
-        tokenizer = TikTokenTokenizer(
+        tokenizer = TikTokenTokenizer(
+            model="gpt-4o", max_completion_tokens=self.max_completion_tokens
+        )
 
         logger.debug("Tokenizer loaded for for FastembedEmbeddingEngine")
         return tokenizer
cognee/infrastructure/databases/vector/embeddings/LiteLLMEmbeddingEngine.py

@@ -6,7 +6,7 @@ import math
 import litellm
 import os
 from cognee.infrastructure.databases.vector.embeddings.EmbeddingEngine import EmbeddingEngine
-from cognee.infrastructure.databases.exceptions
+from cognee.infrastructure.databases.exceptions import EmbeddingException
 from cognee.infrastructure.llm.tokenizer.Gemini import (
     GeminiTokenizer,
 )
@@ -57,7 +57,7 @@ class LiteLLMEmbeddingEngine(EmbeddingEngine):
         api_key: str = None,
         endpoint: str = None,
         api_version: str = None,
-
+        max_completion_tokens: int = 512,
     ):
         self.api_key = api_key
         self.endpoint = endpoint
@@ -65,7 +65,7 @@ class LiteLLMEmbeddingEngine(EmbeddingEngine):
         self.provider = provider
         self.model = model
         self.dimensions = dimensions
-        self.
+        self.max_completion_tokens = max_completion_tokens
         self.tokenizer = self.get_tokenizer()
         self.retry_count = 0
 
@@ -179,20 +179,29 @@ class LiteLLMEmbeddingEngine(EmbeddingEngine):
         model = self.model.split("/")[-1]
 
         if "openai" in self.provider.lower():
-            tokenizer = TikTokenTokenizer(
+            tokenizer = TikTokenTokenizer(
+                model=model, max_completion_tokens=self.max_completion_tokens
+            )
         elif "gemini" in self.provider.lower():
-            tokenizer = GeminiTokenizer(
+            tokenizer = GeminiTokenizer(
+                model=model, max_completion_tokens=self.max_completion_tokens
+            )
         elif "mistral" in self.provider.lower():
-            tokenizer = MistralTokenizer(
+            tokenizer = MistralTokenizer(
+                model=model, max_completion_tokens=self.max_completion_tokens
+            )
         else:
             try:
                 tokenizer = HuggingFaceTokenizer(
-                    model=self.model.replace("hosted_vllm/", ""),
+                    model=self.model.replace("hosted_vllm/", ""),
+                    max_completion_tokens=self.max_completion_tokens,
                 )
             except Exception as e:
                 logger.warning(f"Could not get tokenizer from HuggingFace due to: {e}")
                 logger.info("Switching to TikToken default tokenizer.")
-                tokenizer = TikTokenTokenizer(
+                tokenizer = TikTokenTokenizer(
+                    model=None, max_completion_tokens=self.max_completion_tokens
+                )
 
         logger.debug(f"Tokenizer loaded for model: {self.model}")
         return tokenizer
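The embedding engines now take `max_completion_tokens` directly and thread it into whichever tokenizer matches the provider. A minimal construction sketch using keyword arguments that mirror the attribute names visible in the diff; the chosen values and any parameters not shown above (and their defaults) are assumptions:

```python
# Hedged example: explicit max_completion_tokens on the LiteLLM-backed engine.
# provider/model/dimensions/max_completion_tokens appear in the diff; values are illustrative.
from cognee.infrastructure.databases.vector.embeddings.LiteLLMEmbeddingEngine import (
    LiteLLMEmbeddingEngine,
)

engine = LiteLLMEmbeddingEngine(
    provider="openai",
    model="openai/text-embedding-3-large",
    dimensions=3072,
    max_completion_tokens=512,  # forwarded to the TikToken/Gemini/Mistral/HF tokenizer
)
print(engine.tokenizer)
```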
cognee/infrastructure/databases/vector/embeddings/OllamaEmbeddingEngine.py

@@ -30,7 +30,7 @@ class OllamaEmbeddingEngine(EmbeddingEngine):
     Instance variables:
     - model
     - dimensions
-    -
+    - max_completion_tokens
     - endpoint
     - mock
     - huggingface_tokenizer_name
@@ -39,7 +39,7 @@ class OllamaEmbeddingEngine(EmbeddingEngine):
 
     model: str
     dimensions: int
-
+    max_completion_tokens: int
     endpoint: str
     mock: bool
     huggingface_tokenizer_name: str
@@ -50,13 +50,13 @@ class OllamaEmbeddingEngine(EmbeddingEngine):
         self,
         model: Optional[str] = "avr/sfr-embedding-mistral:latest",
         dimensions: Optional[int] = 1024,
-
+        max_completion_tokens: int = 512,
         endpoint: Optional[str] = "http://localhost:11434/api/embeddings",
         huggingface_tokenizer: str = "Salesforce/SFR-Embedding-Mistral",
     ):
         self.model = model
         self.dimensions = dimensions
-        self.
+        self.max_completion_tokens = max_completion_tokens
         self.endpoint = endpoint
         self.huggingface_tokenizer_name = huggingface_tokenizer
         self.tokenizer = self.get_tokenizer()
@@ -132,7 +132,7 @@ class OllamaEmbeddingEngine(EmbeddingEngine):
         """
         logger.debug("Loading HuggingfaceTokenizer for OllamaEmbeddingEngine...")
         tokenizer = HuggingFaceTokenizer(
-            model=self.huggingface_tokenizer_name,
+            model=self.huggingface_tokenizer_name, max_completion_tokens=self.max_completion_tokens
         )
         logger.debug("Tokenizer loaded for OllamaEmbeddingEngine")
         return tokenizer
cognee/infrastructure/databases/vector/embeddings/config.py

@@ -18,7 +18,7 @@ class EmbeddingConfig(BaseSettings):
     embedding_endpoint: Optional[str] = None
     embedding_api_key: Optional[str] = None
     embedding_api_version: Optional[str] = None
-
+    embedding_max_completion_tokens: Optional[int] = 8191
    huggingface_tokenizer: Optional[str] = None
     model_config = SettingsConfigDict(env_file=".env", extra="allow")
 
@@ -38,7 +38,7 @@ class EmbeddingConfig(BaseSettings):
             "embedding_endpoint": self.embedding_endpoint,
             "embedding_api_key": self.embedding_api_key,
             "embedding_api_version": self.embedding_api_version,
-            "
+            "embedding_max_completion_tokens": self.embedding_max_completion_tokens,
             "huggingface_tokenizer": self.huggingface_tokenizer,
         }
 
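Because `EmbeddingConfig` is a pydantic `BaseSettings` class reading from `.env`, the new `embedding_max_completion_tokens` field can presumably also be set through an environment variable. A small sketch under that assumption; the variable-name mapping follows pydantic-settings' default field-name convention and is not shown in the diff:

```python
# Assumed configuration path: pydantic-settings maps embedding_max_completion_tokens
# to EMBEDDING_MAX_COMPLETION_TOKENS (case-insensitive) unless an alias overrides it.
import os

os.environ["EMBEDDING_MAX_COMPLETION_TOKENS"] = "8191"

# Import after setting the variable so the settings object picks it up; this also
# assumes the remaining embedding_* fields have defaults or are present in .env.
from cognee.infrastructure.databases.vector.embeddings.config import EmbeddingConfig

config = EmbeddingConfig()
print(config.embedding_max_completion_tokens)  # expected: 8191
```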
cognee/infrastructure/databases/vector/embeddings/get_embedding_engine.py

@@ -27,7 +27,7 @@ def get_embedding_engine() -> EmbeddingEngine:
         config.embedding_provider,
         config.embedding_model,
         config.embedding_dimensions,
-        config.
+        config.embedding_max_completion_tokens,
         config.embedding_endpoint,
         config.embedding_api_key,
         config.embedding_api_version,
@@ -41,7 +41,7 @@ def create_embedding_engine(
     embedding_provider,
     embedding_model,
     embedding_dimensions,
-
+    embedding_max_completion_tokens,
     embedding_endpoint,
     embedding_api_key,
     embedding_api_version,
@@ -58,7 +58,7 @@ def create_embedding_engine(
       'ollama', or another supported provider.
     - embedding_model: The model to be used for the embedding engine.
    - embedding_dimensions: The number of dimensions for the embeddings.
-    -
+    - embedding_max_completion_tokens: The maximum number of tokens for the embeddings.
     - embedding_endpoint: The endpoint for the embedding service, relevant for certain
       providers.
     - embedding_api_key: API key to authenticate with the embedding service, if
@@ -81,7 +81,7 @@ def create_embedding_engine(
         return FastembedEmbeddingEngine(
             model=embedding_model,
             dimensions=embedding_dimensions,
-
+            max_completion_tokens=embedding_max_completion_tokens,
         )
 
     if embedding_provider == "ollama":
@@ -90,7 +90,7 @@ def create_embedding_engine(
         return OllamaEmbeddingEngine(
             model=embedding_model,
             dimensions=embedding_dimensions,
-
+            max_completion_tokens=embedding_max_completion_tokens,
             endpoint=embedding_endpoint,
             huggingface_tokenizer=huggingface_tokenizer,
         )
@@ -104,5 +104,5 @@ def create_embedding_engine(
         api_version=embedding_api_version,
         model=embedding_model,
         dimensions=embedding_dimensions,
-
+        max_completion_tokens=embedding_max_completion_tokens,
     )
cognee/infrastructure/databases/vector/exceptions/exceptions.py

@@ -1,12 +1,12 @@
 from fastapi import status
-from cognee.exceptions import
+from cognee.exceptions import CogneeValidationError
 
 
-class CollectionNotFoundError(
+class CollectionNotFoundError(CogneeValidationError):
     """
     Represents an error that occurs when a requested collection cannot be found.
 
-    This class extends the
+    This class extends the CogneeValidationError to handle specific cases where a requested
     collection is unavailable. It can be initialized with a custom message and allows for
     logging options including log level and whether to log the error.
     """
cognee/infrastructure/databases/vector/lancedb/LanceDBAdapter.py

@@ -5,7 +5,7 @@ from pydantic import BaseModel
 from lancedb.pydantic import LanceModel, Vector
 from typing import Generic, List, Optional, TypeVar, Union, get_args, get_origin, get_type_hints
 
-from cognee.exceptions import
+from cognee.infrastructure.databases.exceptions import MissingQueryParameterError
 from cognee.infrastructure.engine import DataPoint
 from cognee.infrastructure.engine.utils import parse_id
 from cognee.infrastructure.files.storage import get_file_storage
@@ -228,7 +228,7 @@ class LanceDBAdapter(VectorDBInterface):
         normalized: bool = True,
     ):
         if query_text is None and query_vector is None:
-            raise
+            raise MissingQueryParameterError()
 
         if query_text and not query_vector:
             query_vector = (await self.embedding_engine.embed_text([query_text]))[0]