cognee 0.5.1.dev0__py3-none-any.whl → 0.5.2.dev0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- cognee/__init__.py +2 -0
- cognee/alembic/README +1 -0
- cognee/alembic/env.py +107 -0
- cognee/alembic/script.py.mako +26 -0
- cognee/alembic/versions/1a58b986e6e1_enable_delete_for_old_tutorial_notebooks.py +52 -0
- cognee/alembic/versions/1d0bb7fede17_add_pipeline_run_status.py +33 -0
- cognee/alembic/versions/1daae0df1866_incremental_loading.py +48 -0
- cognee/alembic/versions/211ab850ef3d_add_sync_operations_table.py +118 -0
- cognee/alembic/versions/45957f0a9849_add_notebook_table.py +46 -0
- cognee/alembic/versions/46a6ce2bd2b2_expand_dataset_database_with_json_.py +333 -0
- cognee/alembic/versions/482cd6517ce4_add_default_user.py +30 -0
- cognee/alembic/versions/76625596c5c3_expand_dataset_database_for_multi_user.py +98 -0
- cognee/alembic/versions/8057ae7329c2_initial_migration.py +25 -0
- cognee/alembic/versions/9e7a3cb85175_loader_separation.py +104 -0
- cognee/alembic/versions/a1b2c3d4e5f6_add_label_column_to_data.py +38 -0
- cognee/alembic/versions/ab7e313804ae_permission_system_rework.py +236 -0
- cognee/alembic/versions/b9274c27a25a_kuzu_11_migration.py +75 -0
- cognee/alembic/versions/c946955da633_multi_tenant_support.py +137 -0
- cognee/alembic/versions/e1ec1dcb50b6_add_last_accessed_to_data.py +51 -0
- cognee/alembic/versions/e4ebee1091e7_expand_data_model_info.py +140 -0
- cognee/alembic.ini +117 -0
- cognee/api/v1/add/routers/get_add_router.py +2 -0
- cognee/api/v1/cognify/cognify.py +11 -6
- cognee/api/v1/cognify/routers/get_cognify_router.py +8 -0
- cognee/api/v1/config/config.py +60 -0
- cognee/api/v1/datasets/routers/get_datasets_router.py +45 -3
- cognee/api/v1/memify/routers/get_memify_router.py +2 -0
- cognee/api/v1/search/routers/get_search_router.py +21 -6
- cognee/api/v1/search/search.py +25 -5
- cognee/api/v1/sync/routers/get_sync_router.py +3 -3
- cognee/cli/commands/add_command.py +1 -1
- cognee/cli/commands/cognify_command.py +6 -0
- cognee/cli/commands/config_command.py +1 -1
- cognee/context_global_variables.py +5 -1
- cognee/eval_framework/answer_generation/answer_generation_executor.py +7 -8
- cognee/infrastructure/databases/cache/cache_db_interface.py +38 -1
- cognee/infrastructure/databases/cache/config.py +6 -0
- cognee/infrastructure/databases/cache/fscache/FsCacheAdapter.py +21 -0
- cognee/infrastructure/databases/cache/get_cache_engine.py +9 -3
- cognee/infrastructure/databases/cache/redis/RedisAdapter.py +60 -1
- cognee/infrastructure/databases/dataset_database_handler/supported_dataset_database_handlers.py +7 -0
- cognee/infrastructure/databases/graph/get_graph_engine.py +29 -1
- cognee/infrastructure/databases/graph/neo4j_driver/Neo4jAuraDevDatasetDatabaseHandler.py +62 -27
- cognee/infrastructure/databases/hybrid/neptune_analytics/NeptuneAnalyticsAdapter.py +17 -4
- cognee/infrastructure/databases/relational/sqlalchemy/SqlAlchemyAdapter.py +2 -1
- cognee/infrastructure/databases/vector/chromadb/ChromaDBAdapter.py +2 -0
- cognee/infrastructure/databases/vector/config.py +6 -0
- cognee/infrastructure/databases/vector/create_vector_engine.py +69 -22
- cognee/infrastructure/databases/vector/embeddings/LiteLLMEmbeddingEngine.py +64 -9
- cognee/infrastructure/databases/vector/embeddings/OllamaEmbeddingEngine.py +13 -2
- cognee/infrastructure/databases/vector/lancedb/LanceDBAdapter.py +16 -3
- cognee/infrastructure/databases/vector/models/ScoredResult.py +3 -3
- cognee/infrastructure/databases/vector/pgvector/PGVectorAdapter.py +16 -3
- cognee/infrastructure/databases/vector/pgvector/PGVectorDatasetDatabaseHandler.py +86 -0
- cognee/infrastructure/databases/vector/pgvector/create_db_and_tables.py +81 -2
- cognee/infrastructure/databases/vector/vector_db_interface.py +8 -0
- cognee/infrastructure/files/utils/get_data_file_path.py +33 -27
- cognee/infrastructure/llm/prompts/extract_query_time.txt +1 -1
- cognee/infrastructure/llm/prompts/generate_event_entity_prompt.txt +1 -1
- cognee/infrastructure/llm/prompts/generate_event_graph_prompt.txt +1 -1
- cognee/infrastructure/llm/prompts/generate_graph_prompt.txt +2 -2
- cognee/infrastructure/llm/prompts/generate_graph_prompt_guided.txt +1 -1
- cognee/infrastructure/llm/prompts/generate_graph_prompt_oneshot.txt +2 -2
- cognee/infrastructure/llm/prompts/generate_graph_prompt_simple.txt +1 -1
- cognee/infrastructure/llm/prompts/generate_graph_prompt_strict.txt +1 -1
- cognee/infrastructure/llm/prompts/search_type_selector_prompt.txt +6 -6
- cognee/infrastructure/llm/prompts/test.txt +1 -1
- cognee/infrastructure/llm/prompts/translate_content.txt +19 -0
- cognee/infrastructure/llm/structured_output_framework/litellm_instructor/llm/get_llm_client.py +24 -0
- cognee/infrastructure/llm/structured_output_framework/litellm_instructor/llm/llama_cpp/adapter.py +191 -0
- cognee/modules/chunking/models/DocumentChunk.py +0 -1
- cognee/modules/cognify/config.py +2 -0
- cognee/modules/data/models/Data.py +1 -0
- cognee/modules/engine/models/Entity.py +0 -1
- cognee/modules/engine/operations/setup.py +6 -0
- cognee/modules/graph/cognee_graph/CogneeGraph.py +150 -37
- cognee/modules/graph/cognee_graph/CogneeGraphElements.py +48 -2
- cognee/modules/graph/utils/__init__.py +1 -0
- cognee/modules/graph/utils/get_entity_nodes_from_triplets.py +12 -0
- cognee/modules/notebooks/methods/__init__.py +1 -0
- cognee/modules/notebooks/methods/create_notebook.py +0 -34
- cognee/modules/notebooks/methods/create_tutorial_notebooks.py +191 -0
- cognee/modules/notebooks/methods/get_notebooks.py +12 -8
- cognee/modules/notebooks/tutorials/cognee-basics/cell-1.md +3 -0
- cognee/modules/notebooks/tutorials/cognee-basics/cell-2.md +10 -0
- cognee/modules/notebooks/tutorials/cognee-basics/cell-3.md +7 -0
- cognee/modules/notebooks/tutorials/cognee-basics/cell-4.py +28 -0
- cognee/modules/notebooks/tutorials/cognee-basics/cell-5.py +3 -0
- cognee/modules/notebooks/tutorials/cognee-basics/cell-6.py +9 -0
- cognee/modules/notebooks/tutorials/cognee-basics/cell-7.py +17 -0
- cognee/modules/notebooks/tutorials/cognee-basics/config.json +4 -0
- cognee/modules/notebooks/tutorials/python-development-with-cognee/cell-1.md +3 -0
- cognee/modules/notebooks/tutorials/python-development-with-cognee/cell-10.md +3 -0
- cognee/modules/notebooks/tutorials/python-development-with-cognee/cell-11.md +3 -0
- cognee/modules/notebooks/tutorials/python-development-with-cognee/cell-12.py +3 -0
- cognee/modules/notebooks/tutorials/python-development-with-cognee/cell-13.md +7 -0
- cognee/modules/notebooks/tutorials/python-development-with-cognee/cell-14.py +6 -0
- cognee/modules/notebooks/tutorials/python-development-with-cognee/cell-15.md +3 -0
- cognee/modules/notebooks/tutorials/python-development-with-cognee/cell-16.py +7 -0
- cognee/modules/notebooks/tutorials/python-development-with-cognee/cell-2.md +9 -0
- cognee/modules/notebooks/tutorials/python-development-with-cognee/cell-3.md +7 -0
- cognee/modules/notebooks/tutorials/python-development-with-cognee/cell-4.md +9 -0
- cognee/modules/notebooks/tutorials/python-development-with-cognee/cell-5.md +5 -0
- cognee/modules/notebooks/tutorials/python-development-with-cognee/cell-6.py +13 -0
- cognee/modules/notebooks/tutorials/python-development-with-cognee/cell-7.md +3 -0
- cognee/modules/notebooks/tutorials/python-development-with-cognee/cell-8.md +3 -0
- cognee/modules/notebooks/tutorials/python-development-with-cognee/cell-9.py +31 -0
- cognee/modules/notebooks/tutorials/python-development-with-cognee/config.json +4 -0
- cognee/modules/notebooks/tutorials/python-development-with-cognee/data/copilot_conversations.json +107 -0
- cognee/modules/notebooks/tutorials/python-development-with-cognee/data/guido_contributions.json +976 -0
- cognee/modules/notebooks/tutorials/python-development-with-cognee/data/my_developer_rules.md +79 -0
- cognee/modules/notebooks/tutorials/python-development-with-cognee/data/pep_style_guide.md +74 -0
- cognee/modules/notebooks/tutorials/python-development-with-cognee/data/zen_principles.md +74 -0
- cognee/modules/retrieval/EntityCompletionRetriever.py +51 -38
- cognee/modules/retrieval/__init__.py +0 -1
- cognee/modules/retrieval/base_retriever.py +66 -10
- cognee/modules/retrieval/chunks_retriever.py +57 -49
- cognee/modules/retrieval/coding_rules_retriever.py +12 -5
- cognee/modules/retrieval/completion_retriever.py +29 -28
- cognee/modules/retrieval/cypher_search_retriever.py +25 -20
- cognee/modules/retrieval/graph_completion_context_extension_retriever.py +42 -46
- cognee/modules/retrieval/graph_completion_cot_retriever.py +68 -51
- cognee/modules/retrieval/graph_completion_retriever.py +78 -63
- cognee/modules/retrieval/graph_summary_completion_retriever.py +2 -0
- cognee/modules/retrieval/lexical_retriever.py +34 -12
- cognee/modules/retrieval/natural_language_retriever.py +18 -15
- cognee/modules/retrieval/summaries_retriever.py +51 -34
- cognee/modules/retrieval/temporal_retriever.py +59 -49
- cognee/modules/retrieval/triplet_retriever.py +31 -32
- cognee/modules/retrieval/utils/access_tracking.py +88 -0
- cognee/modules/retrieval/utils/brute_force_triplet_search.py +99 -85
- cognee/modules/retrieval/utils/node_edge_vector_search.py +174 -0
- cognee/modules/search/methods/__init__.py +1 -0
- cognee/modules/search/methods/get_retriever_output.py +53 -0
- cognee/modules/search/methods/get_search_type_retriever_instance.py +252 -0
- cognee/modules/search/methods/search.py +90 -215
- cognee/modules/search/models/SearchResultPayload.py +67 -0
- cognee/modules/search/types/SearchResult.py +1 -8
- cognee/modules/search/types/SearchType.py +1 -2
- cognee/modules/search/types/__init__.py +1 -1
- cognee/modules/search/utils/__init__.py +1 -2
- cognee/modules/search/utils/transform_insights_to_graph.py +2 -2
- cognee/modules/search/utils/{transform_context_to_graph.py → transform_triplets_to_graph.py} +2 -2
- cognee/modules/users/authentication/default/default_transport.py +11 -1
- cognee/modules/users/authentication/get_api_auth_backend.py +2 -1
- cognee/modules/users/authentication/get_client_auth_backend.py +2 -1
- cognee/modules/users/methods/create_user.py +0 -9
- cognee/modules/users/permissions/methods/has_user_management_permission.py +29 -0
- cognee/modules/visualization/cognee_network_visualization.py +1 -1
- cognee/run_migrations.py +48 -0
- cognee/shared/exceptions/__init__.py +1 -3
- cognee/shared/exceptions/exceptions.py +11 -1
- cognee/shared/usage_logger.py +332 -0
- cognee/shared/utils.py +12 -5
- cognee/tasks/cleanup/cleanup_unused_data.py +172 -0
- cognee/tasks/memify/extract_usage_frequency.py +613 -0
- cognee/tasks/summarization/models.py +0 -2
- cognee/tasks/temporal_graph/__init__.py +0 -1
- cognee/tasks/translation/__init__.py +96 -0
- cognee/tasks/translation/config.py +110 -0
- cognee/tasks/translation/detect_language.py +190 -0
- cognee/tasks/translation/exceptions.py +62 -0
- cognee/tasks/translation/models.py +72 -0
- cognee/tasks/translation/providers/__init__.py +44 -0
- cognee/tasks/translation/providers/azure_provider.py +192 -0
- cognee/tasks/translation/providers/base.py +85 -0
- cognee/tasks/translation/providers/google_provider.py +158 -0
- cognee/tasks/translation/providers/llm_provider.py +143 -0
- cognee/tasks/translation/translate_content.py +282 -0
- cognee/tasks/web_scraper/default_url_crawler.py +6 -2
- cognee/tests/cli_tests/cli_unit_tests/test_cli_commands.py +1 -0
- cognee/tests/cli_tests/cli_unit_tests/test_cli_edge_cases.py +3 -0
- cognee/tests/integration/retrieval/test_brute_force_triplet_search_with_cognify.py +62 -0
- cognee/tests/integration/retrieval/test_chunks_retriever.py +115 -16
- cognee/tests/integration/retrieval/test_graph_completion_retriever.py +13 -5
- cognee/tests/integration/retrieval/test_graph_completion_retriever_context_extension.py +22 -20
- cognee/tests/integration/retrieval/test_graph_completion_retriever_cot.py +23 -24
- cognee/tests/integration/retrieval/test_rag_completion_retriever.py +70 -5
- cognee/tests/integration/retrieval/test_structured_output.py +62 -18
- cognee/tests/integration/retrieval/test_summaries_retriever.py +20 -9
- cognee/tests/integration/retrieval/test_temporal_retriever.py +38 -8
- cognee/tests/integration/retrieval/test_triplet_retriever.py +13 -4
- cognee/tests/integration/shared/test_usage_logger_integration.py +255 -0
- cognee/tests/tasks/translation/README.md +147 -0
- cognee/tests/tasks/translation/__init__.py +1 -0
- cognee/tests/tasks/translation/config_test.py +93 -0
- cognee/tests/tasks/translation/detect_language_test.py +118 -0
- cognee/tests/tasks/translation/providers_test.py +151 -0
- cognee/tests/tasks/translation/translate_content_test.py +213 -0
- cognee/tests/test_chromadb.py +1 -1
- cognee/tests/test_cleanup_unused_data.py +165 -0
- cognee/tests/test_delete_by_id.py +6 -6
- cognee/tests/test_extract_usage_frequency.py +308 -0
- cognee/tests/test_kuzu.py +17 -7
- cognee/tests/test_lancedb.py +3 -1
- cognee/tests/test_library.py +1 -1
- cognee/tests/test_neo4j.py +17 -7
- cognee/tests/test_neptune_analytics_vector.py +3 -1
- cognee/tests/test_permissions.py +172 -187
- cognee/tests/test_pgvector.py +3 -1
- cognee/tests/test_relational_db_migration.py +15 -1
- cognee/tests/test_remote_kuzu.py +3 -1
- cognee/tests/test_s3_file_storage.py +1 -1
- cognee/tests/test_search_db.py +97 -110
- cognee/tests/test_usage_logger_e2e.py +268 -0
- cognee/tests/unit/api/test_get_raw_data_endpoint.py +206 -0
- cognee/tests/unit/eval_framework/answer_generation_test.py +4 -3
- cognee/tests/unit/infrastructure/databases/cache/test_cache_config.py +2 -0
- cognee/tests/unit/modules/graph/cognee_graph_elements_test.py +42 -2
- cognee/tests/unit/modules/graph/cognee_graph_test.py +329 -31
- cognee/tests/unit/modules/retrieval/chunks_retriever_test.py +31 -59
- cognee/tests/unit/modules/retrieval/graph_completion_retriever_context_extension_test.py +70 -33
- cognee/tests/unit/modules/retrieval/graph_completion_retriever_cot_test.py +72 -52
- cognee/tests/unit/modules/retrieval/graph_completion_retriever_test.py +27 -33
- cognee/tests/unit/modules/retrieval/rag_completion_retriever_test.py +28 -15
- cognee/tests/unit/modules/retrieval/summaries_retriever_test.py +37 -42
- cognee/tests/unit/modules/retrieval/temporal_retriever_test.py +48 -64
- cognee/tests/unit/modules/retrieval/test_brute_force_triplet_search.py +263 -24
- cognee/tests/unit/modules/retrieval/test_node_edge_vector_search.py +273 -0
- cognee/tests/unit/modules/retrieval/triplet_retriever_test.py +30 -16
- cognee/tests/unit/modules/search/test_get_search_type_retriever_instance.py +125 -0
- cognee/tests/unit/modules/search/test_search.py +176 -0
- cognee/tests/unit/modules/search/test_search_prepare_search_result_contract.py +190 -0
- cognee/tests/unit/modules/users/test_tutorial_notebook_creation.py +511 -297
- cognee/tests/unit/shared/test_usage_logger.py +241 -0
- cognee/tests/unit/users/permissions/test_has_user_management_permission.py +46 -0
- {cognee-0.5.1.dev0.dist-info → cognee-0.5.2.dev0.dist-info}/METADATA +17 -10
- {cognee-0.5.1.dev0.dist-info → cognee-0.5.2.dev0.dist-info}/RECORD +232 -144
- cognee/api/.env.example +0 -5
- cognee/modules/retrieval/base_graph_retriever.py +0 -24
- cognee/modules/search/methods/get_search_type_tools.py +0 -223
- cognee/modules/search/methods/no_access_control_search.py +0 -62
- cognee/modules/search/utils/prepare_search_result.py +0 -63
- cognee/tests/test_feedback_enrichment.py +0 -174
- {cognee-0.5.1.dev0.dist-info → cognee-0.5.2.dev0.dist-info}/WHEEL +0 -0
- {cognee-0.5.1.dev0.dist-info → cognee-0.5.2.dev0.dist-info}/entry_points.txt +0 -0
- {cognee-0.5.1.dev0.dist-info → cognee-0.5.2.dev0.dist-info}/licenses/LICENSE +0 -0
- {cognee-0.5.1.dev0.dist-info → cognee-0.5.2.dev0.dist-info}/licenses/NOTICE.md +0 -0
cognee/infrastructure/databases/cache/cache_db_interface.py CHANGED
@@ -8,10 +8,13 @@ class CacheDBInterface(ABC):
     Provides a common interface for lock acquisition, release, and context-managed locking.
     """

-    def __init__(self, host: str, port: int, lock_key: str = "default_lock"):
+    def __init__(
+        self, host: str, port: int, lock_key: str = "default_lock", log_key: str = "usage_logs"
+    ):
         self.host = host
         self.port = port
         self.lock_key = lock_key
+        self.log_key = log_key
         self.lock = None

     @abstractmethod
@@ -77,3 +80,37 @@ class CacheDBInterface(ABC):
         Gracefully close any async connections.
         """
         pass
+
+    @abstractmethod
+    async def log_usage(
+        self,
+        user_id: str,
+        log_entry: dict,
+        ttl: int | None = 604800,
+    ):
+        """
+        Log usage information (API endpoint calls, MCP tool invocations) to cache.
+
+        Args:
+            user_id: The user ID.
+            log_entry: Dictionary containing usage log information.
+            ttl: Optional time-to-live (seconds). If provided, the log list expires after this time.
+
+        Raises:
+            CacheConnectionError: If cache connection fails or times out.
+        """
+        pass
+
+    @abstractmethod
+    async def get_usage_logs(self, user_id: str, limit: int = 100):
+        """
+        Retrieve usage logs for a given user.
+
+        Args:
+            user_id: The user ID.
+            limit: Maximum number of logs to retrieve (default: 100).
+
+        Returns:
+            List of usage log entries, most recent first.
+        """
+        pass
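The two new abstract methods define the usage-logging contract that every cache adapter must now implement. A minimal caller-side sketch, assuming an engine obtained through get_cache_engine (shown later in this diff) and purely illustrative user and endpoint values:

import asyncio

from cognee.infrastructure.databases.cache.get_cache_engine import get_cache_engine


async def record_and_read_usage():
    # get_cache_engine returns None when neither caching nor usage logging is enabled
    cache = get_cache_engine()
    if cache is None:
        return

    # Append one entry to the per-user log list and keep the list around for 7 days.
    await cache.log_usage(
        user_id="user-123",                        # illustrative value
        log_entry={"endpoint": "/api/v1/search"},  # illustrative payload
        ttl=604800,
    )

    # Read back the newest entries, most recent first.
    print(await cache.get_usage_logs("user-123", limit=10))


asyncio.run(record_and_read_usage())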
cognee/infrastructure/databases/cache/config.py CHANGED
@@ -13,6 +13,8 @@ class CacheConfig(BaseSettings):
     - cache_port: Port number for the cache service.
     - agentic_lock_expire: Automatic lock expiration time (in seconds).
     - agentic_lock_timeout: Maximum time (in seconds) to wait for the lock release.
+    - usage_logging: Enable/disable usage logging for API endpoints and MCP tools.
+    - usage_logging_ttl: Time-to-live for usage logs in seconds (default: 7 days).
     """

     cache_backend: Literal["redis", "fs"] = "fs"
@@ -24,6 +26,8 @@ class CacheConfig(BaseSettings):
     cache_password: Optional[str] = None
     agentic_lock_expire: int = 240
     agentic_lock_timeout: int = 300
+    usage_logging: bool = False
+    usage_logging_ttl: int = 604800

     model_config = SettingsConfigDict(env_file=".env", extra="allow")

@@ -38,6 +42,8 @@ class CacheConfig(BaseSettings):
             "cache_password": self.cache_password,
             "agentic_lock_expire": self.agentic_lock_expire,
             "agentic_lock_timeout": self.agentic_lock_timeout,
+            "usage_logging": self.usage_logging,
+            "usage_logging_ttl": self.usage_logging_ttl,
         }

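A rough sketch of switching the new settings on through environment variables before the config is read; the variable names assume the default pydantic-settings mapping of field names (no env_prefix is visible in this diff):

import os

# Assumed env var names: BaseSettings maps field names case-insensitively by default.
os.environ["USAGE_LOGGING"] = "true"
os.environ["USAGE_LOGGING_TTL"] = str(7 * 24 * 60 * 60)  # 604800 seconds

from cognee.infrastructure.databases.cache.config import get_cache_config

config = get_cache_config()
print(config.usage_logging, config.usage_logging_ttl)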
cognee/infrastructure/databases/cache/fscache/FsCacheAdapter.py CHANGED
@@ -89,6 +89,27 @@ class FSCacheAdapter(CacheDBInterface):
             return None
         return json.loads(value)

+    async def log_usage(
+        self,
+        user_id: str,
+        log_entry: dict,
+        ttl: int | None = 604800,
+    ):
+        """
+        Usage logging is not supported in filesystem cache backend.
+        This method is a no-op to satisfy the interface.
+        """
+        logger.warning("Usage logging not supported in FSCacheAdapter, skipping")
+        pass
+
+    async def get_usage_logs(self, user_id: str, limit: int = 100):
+        """
+        Usage logging is not supported in filesystem cache backend.
+        This method returns an empty list to satisfy the interface.
+        """
+        logger.warning("Usage logging not supported in FSCacheAdapter, returning empty list")
+        return []
+
     async def close(self):
         if self.cache is not None:
             self.cache.expire()
cognee/infrastructure/databases/cache/get_cache_engine.py CHANGED
@@ -1,7 +1,6 @@
 """Factory to get the appropriate cache coordination engine (e.g., Redis)."""

 from functools import lru_cache
-import os
 from typing import Optional
 from cognee.infrastructure.databases.cache.config import get_cache_config
 from cognee.infrastructure.databases.cache.cache_db_interface import CacheDBInterface
@@ -17,6 +16,7 @@ def create_cache_engine(
     cache_username: str,
     cache_password: str,
     lock_key: str,
+    log_key: str,
     agentic_lock_expire: int = 240,
     agentic_lock_timeout: int = 300,
 ):
@@ -30,6 +30,7 @@ def create_cache_engine(
     - cache_username: Username to authenticate with.
     - cache_password: Password to authenticate with.
     - lock_key: Identifier used for the locking resource.
+    - log_key: Identifier used for usage logging.
     - agentic_lock_expire: Duration to hold the lock after acquisition.
     - agentic_lock_timeout: Max time to wait for the lock before failing.

@@ -37,7 +38,7 @@ def create_cache_engine(
     --------
     - CacheDBInterface: An instance of the appropriate cache adapter.
     """
-    if config.caching:
+    if config.caching or config.usage_logging:
         from cognee.infrastructure.databases.cache.redis.RedisAdapter import RedisAdapter

         if config.cache_backend == "redis":
@@ -47,6 +48,7 @@ def create_cache_engine(
                 username=cache_username,
                 password=cache_password,
                 lock_name=lock_key,
+                log_key=log_key,
                 timeout=agentic_lock_expire,
                 blocking_timeout=agentic_lock_timeout,
             )
@@ -61,7 +63,10 @@ def create_cache_engine(
     return None


-def get_cache_engine(lock_key: Optional[str] = None) -> CacheDBInterface:
+def get_cache_engine(
+    lock_key: Optional[str] = "default_lock",
+    log_key: Optional[str] = "usage_logs",
+) -> Optional[CacheDBInterface]:
     """
     Returns a cache adapter instance using current context configuration.
     """
@@ -72,6 +77,7 @@ def get_cache_engine(lock_key: Optional[str] = None) -> CacheDBInterface:
         cache_username=config.cache_username,
         cache_password=config.cache_password,
         lock_key=lock_key,
+        log_key=log_key,
         agentic_lock_expire=config.agentic_lock_expire,
         agentic_lock_timeout=config.agentic_lock_timeout,
     )
cognee/infrastructure/databases/cache/redis/RedisAdapter.py CHANGED
@@ -17,13 +17,14 @@ class RedisAdapter(CacheDBInterface):
         host,
         port,
         lock_name="default_lock",
+        log_key="usage_logs",
         username=None,
         password=None,
         timeout=240,
         blocking_timeout=300,
         connection_timeout=30,
     ):
-        super().__init__(host, port, lock_name)
+        super().__init__(host, port, lock_name, log_key)

         self.host = host
         self.port = port
@@ -177,6 +178,64 @@ class RedisAdapter(CacheDBInterface):
         entries = await self.async_redis.lrange(session_key, 0, -1)
         return [json.loads(e) for e in entries]

+    async def log_usage(
+        self,
+        user_id: str,
+        log_entry: dict,
+        ttl: int | None = 604800,
+    ):
+        """
+        Log usage information (API endpoint calls, MCP tool invocations) to Redis.
+
+        Args:
+            user_id: The user ID.
+            log_entry: Dictionary containing usage log information.
+            ttl: Optional time-to-live (seconds). If provided, the log list expires after this time.
+
+        Raises:
+            CacheConnectionError: If Redis connection fails or times out.
+        """
+        try:
+            usage_logs_key = f"{self.log_key}:{user_id}"
+
+            await self.async_redis.rpush(usage_logs_key, json.dumps(log_entry))
+
+            if ttl is not None:
+                await self.async_redis.expire(usage_logs_key, ttl)
+
+        except (redis.ConnectionError, redis.TimeoutError) as e:
+            error_msg = f"Redis connection error while logging usage: {str(e)}"
+            logger.error(error_msg)
+            raise CacheConnectionError(error_msg) from e
+        except Exception as e:
+            error_msg = f"Unexpected error while logging usage to Redis: {str(e)}"
+            logger.error(error_msg)
+            raise CacheConnectionError(error_msg) from e
+
+    async def get_usage_logs(self, user_id: str, limit: int = 100):
+        """
+        Retrieve usage logs for a given user.
+
+        Args:
+            user_id: The user ID.
+            limit: Maximum number of logs to retrieve (default: 100).
+
+        Returns:
+            List of usage log entries, most recent first.
+        """
+        try:
+            usage_logs_key = f"{self.log_key}:{user_id}"
+            entries = await self.async_redis.lrange(usage_logs_key, -limit, -1)
+            return [json.loads(e) for e in reversed(entries)] if entries else []
+        except (redis.ConnectionError, redis.TimeoutError) as e:
+            error_msg = f"Redis connection error while retrieving usage logs: {str(e)}"
+            logger.error(error_msg)
+            raise CacheConnectionError(error_msg) from e
+        except Exception as e:
+            error_msg = f"Unexpected error while retrieving usage logs from Redis: {str(e)}"
+            logger.error(error_msg)
+            raise CacheConnectionError(error_msg) from e
+
     async def close(self):
         """
         Gracefully close the async Redis connection.
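Behind log_usage and get_usage_logs the storage is one Redis list per user: entries are RPUSHed as JSON onto "usage_logs:<user_id>", the key's TTL is refreshed on every write, and reads take the last limit entries with LRANGE and reverse them so the newest comes first. A standalone sketch of the same key operations, assuming a reachable local Redis and redis-py's asyncio client:

import asyncio
import json

import redis.asyncio as redis


async def sketch_usage_log_layout():
    r = redis.Redis(host="localhost", port=6379)  # assumed local instance
    key = "usage_logs:user-123"

    # What log_usage does: append to the tail of the list and (re)set the expiry.
    await r.rpush(key, json.dumps({"endpoint": "/api/v1/search"}))
    await r.expire(key, 604800)

    # What get_usage_logs does: last `limit` entries, reversed so the newest is first.
    entries = await r.lrange(key, -10, -1)
    newest_first = [json.loads(e) for e in reversed(entries)]

    await r.aclose()  # close the connection (redis-py 5.x API)
    return newest_first


print(asyncio.run(sketch_usage_log_layout()))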
cognee/infrastructure/databases/dataset_database_handler/supported_dataset_database_handlers.py CHANGED
@@ -7,6 +7,9 @@ from cognee.infrastructure.databases.vector.lancedb.LanceDBDatasetDatabaseHandle
 from cognee.infrastructure.databases.graph.kuzu.KuzuDatasetDatabaseHandler import (
     KuzuDatasetDatabaseHandler,
 )
+from cognee.infrastructure.databases.vector.pgvector.PGVectorDatasetDatabaseHandler import (
+    PGVectorDatasetDatabaseHandler,
+)

 supported_dataset_database_handlers = {
     "neo4j_aura_dev": {
@@ -14,5 +17,9 @@ supported_dataset_database_handlers = {
         "handler_provider": "neo4j",
     },
     "lancedb": {"handler_instance": LanceDBDatasetDatabaseHandler, "handler_provider": "lancedb"},
+    "pgvector": {
+        "handler_instance": PGVectorDatasetDatabaseHandler,
+        "handler_provider": "pgvector",
+    },
     "kuzu": {"handler_instance": KuzuDatasetDatabaseHandler, "handler_provider": "kuzu"},
 }
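With the pgvector entry registered, resolving a handler stays a plain dictionary lookup; a small sketch of what a call site might do (the actual call sites live elsewhere in the package and are not part of this diff):

from cognee.infrastructure.databases.dataset_database_handler.supported_dataset_database_handlers import (
    supported_dataset_database_handlers,
)

entry = supported_dataset_database_handlers["pgvector"]
handler_cls = entry["handler_instance"]  # PGVectorDatasetDatabaseHandler
provider = entry["handler_provider"]     # "pgvector"
print(handler_cls.__name__, provider)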
cognee/infrastructure/databases/graph/get_graph_engine.py CHANGED
@@ -24,7 +24,6 @@ async def get_graph_engine() -> GraphDBInterface:
     return graph_client


-@lru_cache
 def create_graph_engine(
     graph_database_provider,
     graph_file_path,
@@ -35,6 +34,35 @@ def create_graph_engine(
     graph_database_port="",
     graph_database_key="",
     graph_dataset_database_handler="",
+):
+    """
+    Wrapper function to call create graph engine with caching.
+    For a detailed description, see _create_graph_engine.
+    """
+    return _create_graph_engine(
+        graph_database_provider,
+        graph_file_path,
+        graph_database_url,
+        graph_database_name,
+        graph_database_username,
+        graph_database_password,
+        graph_database_port,
+        graph_database_key,
+        graph_dataset_database_handler,
+    )
+
+
+@lru_cache
+def _create_graph_engine(
+    graph_database_provider,
+    graph_file_path,
+    graph_database_url="",
+    graph_database_name="",
+    graph_database_username="",
+    graph_database_password="",
+    graph_database_port="",
+    graph_database_key="",
+    graph_dataset_database_handler="",
 ):
     """
     Create a graph engine based on the specified provider type.
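The lru_cache moves from the public create_graph_engine onto a private _create_graph_engine, with the public name now a plain pass-through, so callers keep the same-arguments-same-instance memoization while the public function itself is no longer the cached object. A small sketch of that behavior with stand-in names (not the package's real constructors):

from functools import lru_cache


@lru_cache
def _build_engine(provider: str, path: str):
    # Stand-in for _create_graph_engine: expensive construction runs once per argument set.
    return object()


def build_engine(provider: str, path: str):
    # Stand-in for create_graph_engine: a thin wrapper forwarding to the cached builder.
    return _build_engine(provider, path)


assert build_engine("kuzu", "/tmp/graph") is build_engine("kuzu", "/tmp/graph")
assert build_engine("kuzu", "/tmp/graph") is not build_engine("neo4j", "/tmp/graph")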
cognee/infrastructure/databases/graph/neo4j_driver/Neo4jAuraDevDatasetDatabaseHandler.py CHANGED
@@ -1,11 +1,13 @@
 import os
+import aiohttp
 import asyncio
-import requests
 import base64
 import hashlib
 from uuid import UUID
 from typing import Optional
+from urllib.parse import urlparse
 from cryptography.fernet import Fernet
+from aiohttp import BasicAuth

 from cognee.infrastructure.databases.graph import get_graph_config
 from cognee.modules.users.models import User, DatasetDatabase
@@ -23,7 +25,6 @@ class Neo4jAuraDevDatasetDatabaseHandler(DatasetDatabaseHandlerInterface):

     Quality of life improvements:
     - Allow configuration of different Neo4j Aura plans and regions.
-    - Requests should be made async, currently a blocking requests library is used.
     """

     @classmethod
@@ -49,6 +50,7 @@ class Neo4jAuraDevDatasetDatabaseHandler(DatasetDatabaseHandlerInterface):
         graph_db_name = f"{dataset_id}"

         # Client credentials and encryption
+        # Note: Should not be used as class variables so that they are not persisted in memory longer than needed
         client_id = os.environ.get("NEO4J_CLIENT_ID", None)
         client_secret = os.environ.get("NEO4J_CLIENT_SECRET", None)
         tenant_id = os.environ.get("NEO4J_TENANT_ID", None)
@@ -63,22 +65,13 @@ class Neo4jAuraDevDatasetDatabaseHandler(DatasetDatabaseHandlerInterface):
                 "NEO4J_CLIENT_ID, NEO4J_CLIENT_SECRET, and NEO4J_TENANT_ID environment variables must be set to use Neo4j Aura DatasetDatabase Handling."
             )

-
-        def get_aura_token(client_id: str, client_secret: str) -> dict:
-            url = "https://api.neo4j.io/oauth/token"
-            data = {"grant_type": "client_credentials"}  # sent as application/x-www-form-urlencoded
-
-            resp = requests.post(url, data=data, auth=(client_id, client_secret))
-            resp.raise_for_status()  # raises if the request failed
-            return resp.json()
-
-        resp = get_aura_token(client_id, client_secret)
+        resp_token = await cls._get_aura_token(client_id, client_secret)

         url = "https://api.neo4j.io/v1/instances"

         headers = {
             "accept": "application/json",
-            "Authorization": f"Bearer {resp['access_token']}",
+            "Authorization": f"Bearer {resp_token['access_token']}",
             "Content-Type": "application/json",
         }

@@ -96,31 +89,38 @@ class Neo4jAuraDevDatasetDatabaseHandler(DatasetDatabaseHandlerInterface):
             "cloud_provider": "gcp",
         }

-
+        async def _create_database_instance_request():
+            async with aiohttp.ClientSession() as session:
+                async with session.post(url, headers=headers, json=payload) as resp:
+                    resp.raise_for_status()
+                    return await resp.json()
+
+        resp_create = await _create_database_instance_request()

         graph_db_name = "neo4j"  # Has to be 'neo4j' for Aura
-        graph_db_url =
-        graph_db_key =
-        graph_db_username =
-        graph_db_password =
+        graph_db_url = resp_create["data"]["connection_url"]
+        graph_db_key = resp_token["access_token"]
+        graph_db_username = resp_create["data"]["username"]
+        graph_db_password = resp_create["data"]["password"]

         async def _wait_for_neo4j_instance_provisioning(instance_id: str, headers: dict):
             # Poll until the instance is running
             status_url = f"https://api.neo4j.io/v1/instances/{instance_id}"
             status = ""
             for attempt in range(30):  # Try for up to ~5 minutes
-
-                    status_url, headers=headers
-
-
-
-
-
+                async with aiohttp.ClientSession() as session:
+                    async with session.get(status_url, headers=headers) as resp:
+                        resp.raise_for_status()
+                        status_resp = await resp.json()
+                status = status_resp["data"]["status"]
+                if status.lower() == "running":
+                    return
+                await asyncio.sleep(10)
             raise TimeoutError(
                 f"Neo4j instance '{graph_db_name}' did not become ready within 5 minutes. Status: {status}"
             )

-        instance_id =
+        instance_id = resp_create["data"]["id"]
         await _wait_for_neo4j_instance_provisioning(instance_id, headers)

         encrypted_db_password_bytes = cipher.encrypt(graph_db_password.encode())
@@ -165,4 +165,39 @@ class Neo4jAuraDevDatasetDatabaseHandler(DatasetDatabaseHandlerInterface):

     @classmethod
     async def delete_dataset(cls, dataset_database: DatasetDatabase):
-
+        # Get dataset database information and credentials
+        dataset_database = await cls.resolve_dataset_connection_info(dataset_database)
+
+        parsed_url = urlparse(dataset_database.graph_database_url)
+        instance_id = parsed_url.hostname.split(".")[0]
+
+        url = f"https://api.neo4j.io/v1/instances/{instance_id}"
+
+        # Get access token for Neo4j Aura API
+        # Client credentials
+        client_id = os.environ.get("NEO4J_CLIENT_ID", None)
+        client_secret = os.environ.get("NEO4J_CLIENT_SECRET", None)
+        resp = await cls._get_aura_token(client_id, client_secret)
+
+        headers = {
+            "accept": "application/json",
+            "Authorization": f"Bearer {resp['access_token']}",
+            "Content-Type": "application/json",
+        }
+
+        async with aiohttp.ClientSession() as session:
+            async with session.delete(url, headers=headers) as resp:
+                resp.raise_for_status()
+                return await resp.json()
+
+    @classmethod
+    async def _get_aura_token(cls, client_id: str, client_secret: str) -> dict:
+        url = "https://api.neo4j.io/oauth/token"
+        data = {"grant_type": "client_credentials"}  # sent as application/x-www-form-urlencoded
+
+        async with aiohttp.ClientSession() as session:
+            async with session.post(
+                url, data=data, auth=BasicAuth(client_id, client_secret)
+            ) as resp:
+                resp.raise_for_status()
+                return await resp.json()
cognee/infrastructure/databases/hybrid/neptune_analytics/NeptuneAnalyticsAdapter.py CHANGED
@@ -236,6 +236,7 @@ class NeptuneAnalyticsAdapter(NeptuneGraphDB, VectorDBInterface):
         query_vector: Optional[List[float]] = None,
         limit: Optional[int] = None,
         with_vector: bool = False,
+        include_payload: bool = False,  # TODO: Add support for this parameter
     ):
         """
         Perform a search in the specified collection using either a text query or a vector
@@ -290,7 +291,7 @@ class NeptuneAnalyticsAdapter(NeptuneGraphDB, VectorDBInterface):
             query_string = f"""
             CALL neptune.algo.vectors.topKByEmbeddingWithFiltering({{
                 topK: {limit},
-                embedding: {embedding},
+                embedding: {embedding},
                 nodeFilter: {{ equals: {{property: '{self._COLLECTION_PREFIX}', value: '{collection_name}'}} }}
             }}
             )
@@ -299,7 +300,7 @@ class NeptuneAnalyticsAdapter(NeptuneGraphDB, VectorDBInterface):

         if with_vector:
             query_string += """
-            WITH node, score, id(node) as node_id
+            WITH node, score, id(node) as node_id
             MATCH (n)
             WHERE id(n) = id(node)
             CALL neptune.algo.vectors.get(n)
@@ -319,7 +320,12 @@ class NeptuneAnalyticsAdapter(NeptuneGraphDB, VectorDBInterface):
             self._na_exception_handler(e, query_string)

     async def batch_search(
-        self,
+        self,
+        collection_name: str,
+        query_texts: List[str],
+        limit: int,
+        with_vectors: bool = False,
+        include_payload: bool = False,
     ):
         """
         Perform a batch search using multiple text queries against a collection.
@@ -342,7 +348,14 @@ class NeptuneAnalyticsAdapter(NeptuneGraphDB, VectorDBInterface):
         data_vectors = await self.embedding_engine.embed_text(query_texts)
         return await asyncio.gather(
             *[
-                self.search(
+                self.search(
+                    collection_name,
+                    None,
+                    vector,
+                    limit,
+                    with_vectors,
+                    include_payload=include_payload,
+                )
                 for vector in data_vectors
             ]
         )
cognee/infrastructure/databases/relational/sqlalchemy/SqlAlchemyAdapter.py CHANGED
@@ -8,7 +8,7 @@ from typing import AsyncGenerator, List
 from contextlib import asynccontextmanager
 from sqlalchemy.orm import joinedload
 from sqlalchemy.exc import NoResultFound
-from sqlalchemy import NullPool, text, select, MetaData, Table, delete, inspect
+from sqlalchemy import NullPool, text, select, MetaData, Table, delete, inspect, URL
 from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine, async_sessionmaker

 from cognee.modules.data.models.Data import Data
@@ -549,6 +549,7 @@ class SQLAlchemyAdapter:
                     )
                     await connection.execute(drop_table_query)
                 metadata.clear()
+
         except Exception as e:
             logger.error(f"Error deleting database: {e}")
             raise e
cognee/infrastructure/databases/vector/chromadb/ChromaDBAdapter.py CHANGED
@@ -355,6 +355,7 @@ class ChromaDBAdapter(VectorDBInterface):
         limit: Optional[int] = 15,
         with_vector: bool = False,
         normalized: bool = True,
+        include_payload: bool = False,  # TODO: Add support for this parameter when set to False
     ):
         """
         Search for items in a collection using either a text or a vector query.
@@ -441,6 +442,7 @@ class ChromaDBAdapter(VectorDBInterface):
         query_texts: List[str],
         limit: int = 5,
         with_vectors: bool = False,
+        include_payload: bool = False,
     ):
         """
         Perform multiple searches in a single request for efficiency, returning results for each
cognee/infrastructure/databases/vector/config.py CHANGED
@@ -29,6 +29,9 @@ class VectorConfig(BaseSettings):
     vector_db_key: str = ""
     vector_db_provider: str = "lancedb"
     vector_dataset_database_handler: str = "lancedb"
+    vector_db_username: str = ""
+    vector_db_password: str = ""
+    vector_db_host: str = ""

     model_config = SettingsConfigDict(env_file=".env", extra="allow")

@@ -65,6 +68,9 @@ class VectorConfig(BaseSettings):
             "vector_db_key": self.vector_db_key,
             "vector_db_provider": self.vector_db_provider,
             "vector_dataset_database_handler": self.vector_dataset_database_handler,
+            "vector_db_username": self.vector_db_username,
+            "vector_db_password": self.vector_db_password,
+            "vector_db_host": self.vector_db_host,
         }

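The three new VectorConfig fields follow the same BaseSettings pattern as the cache config above; a sketch of supplying them through environment variables, with placeholder values and the default pydantic name mapping assumed:

import os

os.environ["VECTOR_DB_USERNAME"] = "cognee"      # placeholder
os.environ["VECTOR_DB_PASSWORD"] = "change-me"   # placeholder
os.environ["VECTOR_DB_HOST"] = "127.0.0.1"       # placeholder

from cognee.infrastructure.databases.vector.config import VectorConfig

config = VectorConfig()
print(config.vector_db_username, config.vector_db_host)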