cognee 0.2.1.dev7__py3-none-any.whl → 0.2.2.dev1__py3-none-any.whl
This diff shows the content changes between two publicly released versions of the package, as they appear in the public registry to which they were published. It is provided for informational purposes only.
- cognee/api/client.py +44 -4
- cognee/api/health.py +332 -0
- cognee/api/v1/add/add.py +5 -2
- cognee/api/v1/add/routers/get_add_router.py +3 -0
- cognee/api/v1/cognify/code_graph_pipeline.py +3 -1
- cognee/api/v1/cognify/cognify.py +8 -0
- cognee/api/v1/cognify/routers/get_cognify_router.py +8 -1
- cognee/api/v1/config/config.py +3 -1
- cognee/api/v1/datasets/routers/get_datasets_router.py +2 -8
- cognee/api/v1/delete/delete.py +16 -12
- cognee/api/v1/responses/routers/get_responses_router.py +3 -1
- cognee/api/v1/search/search.py +10 -0
- cognee/api/v1/settings/routers/get_settings_router.py +0 -2
- cognee/base_config.py +1 -0
- cognee/eval_framework/evaluation/direct_llm_eval_adapter.py +5 -6
- cognee/infrastructure/databases/graph/config.py +2 -0
- cognee/infrastructure/databases/graph/get_graph_engine.py +58 -12
- cognee/infrastructure/databases/graph/graph_db_interface.py +15 -10
- cognee/infrastructure/databases/graph/kuzu/adapter.py +43 -16
- cognee/infrastructure/databases/graph/kuzu/kuzu_migrate.py +281 -0
- cognee/infrastructure/databases/graph/neo4j_driver/adapter.py +151 -77
- cognee/infrastructure/databases/graph/neptune_driver/__init__.py +15 -0
- cognee/infrastructure/databases/graph/neptune_driver/adapter.py +1427 -0
- cognee/infrastructure/databases/graph/neptune_driver/exceptions.py +115 -0
- cognee/infrastructure/databases/graph/neptune_driver/neptune_utils.py +224 -0
- cognee/infrastructure/databases/graph/networkx/adapter.py +3 -3
- cognee/infrastructure/databases/hybrid/neptune_analytics/NeptuneAnalyticsAdapter.py +449 -0
- cognee/infrastructure/databases/relational/sqlalchemy/SqlAlchemyAdapter.py +11 -3
- cognee/infrastructure/databases/vector/chromadb/ChromaDBAdapter.py +8 -3
- cognee/infrastructure/databases/vector/create_vector_engine.py +31 -23
- cognee/infrastructure/databases/vector/embeddings/FastembedEmbeddingEngine.py +3 -1
- cognee/infrastructure/databases/vector/embeddings/LiteLLMEmbeddingEngine.py +21 -6
- cognee/infrastructure/databases/vector/embeddings/OllamaEmbeddingEngine.py +4 -3
- cognee/infrastructure/databases/vector/embeddings/get_embedding_engine.py +3 -1
- cognee/infrastructure/databases/vector/lancedb/LanceDBAdapter.py +22 -16
- cognee/infrastructure/databases/vector/pgvector/PGVectorAdapter.py +36 -34
- cognee/infrastructure/databases/vector/vector_db_interface.py +78 -7
- cognee/infrastructure/files/utils/get_data_file_path.py +39 -0
- cognee/infrastructure/files/utils/guess_file_type.py +2 -2
- cognee/infrastructure/files/utils/open_data_file.py +4 -23
- cognee/infrastructure/llm/LLMGateway.py +137 -0
- cognee/infrastructure/llm/__init__.py +14 -4
- cognee/infrastructure/llm/config.py +29 -1
- cognee/infrastructure/llm/prompts/answer_hotpot_question.txt +1 -1
- cognee/infrastructure/llm/prompts/answer_hotpot_using_cognee_search.txt +1 -1
- cognee/infrastructure/llm/prompts/answer_simple_question.txt +1 -1
- cognee/infrastructure/llm/prompts/answer_simple_question_restricted.txt +1 -1
- cognee/infrastructure/llm/prompts/categorize_categories.txt +1 -1
- cognee/infrastructure/llm/prompts/classify_content.txt +1 -1
- cognee/infrastructure/llm/prompts/context_for_question.txt +1 -1
- cognee/infrastructure/llm/prompts/graph_context_for_question.txt +1 -1
- cognee/infrastructure/llm/prompts/natural_language_retriever_system.txt +1 -1
- cognee/infrastructure/llm/prompts/patch_gen_instructions.txt +1 -1
- cognee/infrastructure/llm/prompts/search_type_selector_prompt.txt +130 -0
- cognee/infrastructure/llm/prompts/summarize_code.txt +2 -2
- cognee/infrastructure/llm/structured_output_framework/baml/baml_client/__init__.py +57 -0
- cognee/infrastructure/llm/structured_output_framework/baml/baml_client/async_client.py +533 -0
- cognee/infrastructure/llm/structured_output_framework/baml/baml_client/config.py +94 -0
- cognee/infrastructure/llm/structured_output_framework/baml/baml_client/globals.py +37 -0
- cognee/infrastructure/llm/structured_output_framework/baml/baml_client/inlinedbaml.py +21 -0
- cognee/infrastructure/llm/structured_output_framework/baml/baml_client/parser.py +131 -0
- cognee/infrastructure/llm/structured_output_framework/baml/baml_client/runtime.py +266 -0
- cognee/infrastructure/llm/structured_output_framework/baml/baml_client/stream_types.py +137 -0
- cognee/infrastructure/llm/structured_output_framework/baml/baml_client/sync_client.py +550 -0
- cognee/infrastructure/llm/structured_output_framework/baml/baml_client/tracing.py +26 -0
- cognee/infrastructure/llm/structured_output_framework/baml/baml_client/type_builder.py +962 -0
- cognee/infrastructure/llm/structured_output_framework/baml/baml_client/type_map.py +52 -0
- cognee/infrastructure/llm/structured_output_framework/baml/baml_client/types.py +166 -0
- cognee/infrastructure/llm/structured_output_framework/baml/baml_src/extract_categories.baml +109 -0
- cognee/infrastructure/llm/structured_output_framework/baml/baml_src/extract_content_graph.baml +343 -0
- cognee/{modules/data → infrastructure/llm/structured_output_framework/baml/baml_src}/extraction/__init__.py +1 -0
- cognee/infrastructure/llm/structured_output_framework/baml/baml_src/extraction/extract_summary.py +89 -0
- cognee/infrastructure/llm/structured_output_framework/baml/baml_src/extraction/knowledge_graph/extract_content_graph.py +33 -0
- cognee/infrastructure/llm/structured_output_framework/baml/baml_src/generators.baml +18 -0
- cognee/infrastructure/llm/structured_output_framework/litellm_instructor/extraction/__init__.py +3 -0
- cognee/infrastructure/llm/structured_output_framework/litellm_instructor/extraction/extract_categories.py +12 -0
- cognee/{modules/data → infrastructure/llm/structured_output_framework/litellm_instructor}/extraction/extract_summary.py +16 -7
- cognee/{modules/data → infrastructure/llm/structured_output_framework/litellm_instructor}/extraction/knowledge_graph/extract_content_graph.py +7 -6
- cognee/infrastructure/llm/{anthropic → structured_output_framework/litellm_instructor/llm/anthropic}/adapter.py +10 -4
- cognee/infrastructure/llm/{gemini → structured_output_framework/litellm_instructor/llm/gemini}/adapter.py +6 -5
- cognee/infrastructure/llm/structured_output_framework/litellm_instructor/llm/generic_llm_api/__init__.py +0 -0
- cognee/infrastructure/llm/{generic_llm_api → structured_output_framework/litellm_instructor/llm/generic_llm_api}/adapter.py +7 -3
- cognee/infrastructure/llm/{get_llm_client.py → structured_output_framework/litellm_instructor/llm/get_llm_client.py} +18 -6
- cognee/infrastructure/llm/{llm_interface.py → structured_output_framework/litellm_instructor/llm/llm_interface.py} +2 -2
- cognee/infrastructure/llm/structured_output_framework/litellm_instructor/llm/ollama/__init__.py +0 -0
- cognee/infrastructure/llm/{ollama → structured_output_framework/litellm_instructor/llm/ollama}/adapter.py +4 -2
- cognee/infrastructure/llm/structured_output_framework/litellm_instructor/llm/openai/__init__.py +0 -0
- cognee/infrastructure/llm/{openai → structured_output_framework/litellm_instructor/llm/openai}/adapter.py +6 -4
- cognee/infrastructure/llm/{rate_limiter.py → structured_output_framework/litellm_instructor/llm/rate_limiter.py} +0 -5
- cognee/infrastructure/llm/tokenizer/Gemini/adapter.py +4 -2
- cognee/infrastructure/llm/tokenizer/TikToken/adapter.py +7 -3
- cognee/infrastructure/llm/tokenizer/__init__.py +4 -0
- cognee/infrastructure/llm/utils.py +3 -1
- cognee/infrastructure/loaders/LoaderEngine.py +156 -0
- cognee/infrastructure/loaders/LoaderInterface.py +73 -0
- cognee/infrastructure/loaders/__init__.py +18 -0
- cognee/infrastructure/loaders/core/__init__.py +7 -0
- cognee/infrastructure/loaders/core/audio_loader.py +98 -0
- cognee/infrastructure/loaders/core/image_loader.py +114 -0
- cognee/infrastructure/loaders/core/text_loader.py +90 -0
- cognee/infrastructure/loaders/create_loader_engine.py +32 -0
- cognee/infrastructure/loaders/external/__init__.py +22 -0
- cognee/infrastructure/loaders/external/pypdf_loader.py +96 -0
- cognee/infrastructure/loaders/external/unstructured_loader.py +127 -0
- cognee/infrastructure/loaders/get_loader_engine.py +18 -0
- cognee/infrastructure/loaders/supported_loaders.py +18 -0
- cognee/infrastructure/loaders/use_loader.py +21 -0
- cognee/infrastructure/loaders/utils/__init__.py +0 -0
- cognee/modules/data/methods/__init__.py +1 -0
- cognee/modules/data/methods/get_authorized_dataset.py +23 -0
- cognee/modules/data/models/Data.py +13 -3
- cognee/modules/data/processing/document_types/AudioDocument.py +2 -2
- cognee/modules/data/processing/document_types/ImageDocument.py +2 -2
- cognee/modules/data/processing/document_types/PdfDocument.py +4 -11
- cognee/modules/data/processing/document_types/UnstructuredDocument.py +2 -5
- cognee/modules/engine/utils/generate_edge_id.py +5 -0
- cognee/modules/graph/cognee_graph/CogneeGraph.py +45 -35
- cognee/modules/graph/methods/get_formatted_graph_data.py +8 -2
- cognee/modules/graph/utils/get_graph_from_model.py +93 -101
- cognee/modules/ingestion/data_types/TextData.py +8 -2
- cognee/modules/ingestion/save_data_to_file.py +1 -1
- cognee/modules/pipelines/exceptions/__init__.py +1 -0
- cognee/modules/pipelines/exceptions/exceptions.py +12 -0
- cognee/modules/pipelines/models/DataItemStatus.py +5 -0
- cognee/modules/pipelines/models/PipelineRunInfo.py +6 -0
- cognee/modules/pipelines/models/__init__.py +1 -0
- cognee/modules/pipelines/operations/pipeline.py +10 -2
- cognee/modules/pipelines/operations/run_tasks.py +252 -20
- cognee/modules/pipelines/operations/run_tasks_distributed.py +1 -1
- cognee/modules/retrieval/chunks_retriever.py +23 -1
- cognee/modules/retrieval/code_retriever.py +66 -9
- cognee/modules/retrieval/completion_retriever.py +11 -9
- cognee/modules/retrieval/context_providers/TripletSearchContextProvider.py +0 -2
- cognee/modules/retrieval/graph_completion_context_extension_retriever.py +0 -2
- cognee/modules/retrieval/graph_completion_cot_retriever.py +8 -9
- cognee/modules/retrieval/graph_completion_retriever.py +1 -1
- cognee/modules/retrieval/insights_retriever.py +4 -0
- cognee/modules/retrieval/natural_language_retriever.py +9 -15
- cognee/modules/retrieval/summaries_retriever.py +23 -1
- cognee/modules/retrieval/utils/brute_force_triplet_search.py +23 -4
- cognee/modules/retrieval/utils/completion.py +6 -9
- cognee/modules/retrieval/utils/description_to_codepart_search.py +2 -3
- cognee/modules/search/methods/search.py +5 -1
- cognee/modules/search/operations/__init__.py +1 -0
- cognee/modules/search/operations/select_search_type.py +42 -0
- cognee/modules/search/types/SearchType.py +1 -0
- cognee/modules/settings/get_settings.py +0 -8
- cognee/modules/settings/save_vector_db_config.py +1 -1
- cognee/shared/data_models.py +3 -1
- cognee/shared/logging_utils.py +0 -5
- cognee/tasks/chunk_naive_llm_classifier/chunk_naive_llm_classifier.py +2 -2
- cognee/tasks/documents/extract_chunks_from_documents.py +10 -12
- cognee/tasks/entity_completion/entity_extractors/llm_entity_extractor.py +4 -6
- cognee/tasks/graph/cascade_extract/utils/extract_content_nodes_and_relationship_names.py +4 -6
- cognee/tasks/graph/cascade_extract/utils/extract_edge_triplets.py +6 -7
- cognee/tasks/graph/cascade_extract/utils/extract_nodes.py +4 -7
- cognee/tasks/graph/extract_graph_from_code.py +3 -2
- cognee/tasks/graph/extract_graph_from_data.py +4 -3
- cognee/tasks/graph/infer_data_ontology.py +5 -6
- cognee/tasks/ingestion/data_item_to_text_file.py +79 -0
- cognee/tasks/ingestion/ingest_data.py +91 -61
- cognee/tasks/ingestion/resolve_data_directories.py +3 -0
- cognee/tasks/repo_processor/get_repo_file_dependencies.py +3 -0
- cognee/tasks/storage/index_data_points.py +1 -1
- cognee/tasks/storage/index_graph_edges.py +4 -1
- cognee/tasks/summarization/summarize_code.py +2 -3
- cognee/tasks/summarization/summarize_text.py +3 -2
- cognee/tests/test_cognee_server_start.py +12 -7
- cognee/tests/test_deduplication.py +2 -2
- cognee/tests/test_deletion.py +58 -17
- cognee/tests/test_graph_visualization_permissions.py +161 -0
- cognee/tests/test_neptune_analytics_graph.py +309 -0
- cognee/tests/test_neptune_analytics_hybrid.py +176 -0
- cognee/tests/{test_weaviate.py → test_neptune_analytics_vector.py} +86 -11
- cognee/tests/test_pgvector.py +5 -5
- cognee/tests/test_s3.py +1 -6
- cognee/tests/unit/infrastructure/databases/test_rate_limiter.py +11 -10
- cognee/tests/unit/infrastructure/databases/vector/__init__.py +0 -0
- cognee/tests/unit/infrastructure/mock_embedding_engine.py +1 -1
- cognee/tests/unit/infrastructure/test_embedding_rate_limiting_realistic.py +5 -5
- cognee/tests/unit/infrastructure/test_rate_limiting_realistic.py +6 -4
- cognee/tests/unit/infrastructure/test_rate_limiting_retry.py +1 -1
- cognee/tests/unit/interfaces/graph/get_graph_from_model_unit_test.py +61 -3
- cognee/tests/unit/modules/retrieval/graph_completion_retriever_test.py +84 -9
- cognee/tests/unit/modules/search/search_methods_test.py +55 -0
- {cognee-0.2.1.dev7.dist-info → cognee-0.2.2.dev1.dist-info}/METADATA +13 -9
- {cognee-0.2.1.dev7.dist-info → cognee-0.2.2.dev1.dist-info}/RECORD +203 -164
- cognee/infrastructure/databases/vector/pinecone/adapter.py +0 -8
- cognee/infrastructure/databases/vector/qdrant/QDrantAdapter.py +0 -514
- cognee/infrastructure/databases/vector/qdrant/__init__.py +0 -2
- cognee/infrastructure/databases/vector/weaviate_db/WeaviateAdapter.py +0 -527
- cognee/infrastructure/databases/vector/weaviate_db/__init__.py +0 -1
- cognee/modules/data/extraction/extract_categories.py +0 -14
- cognee/tests/test_qdrant.py +0 -99
- distributed/Dockerfile +0 -34
- distributed/app.py +0 -4
- distributed/entrypoint.py +0 -71
- distributed/entrypoint.sh +0 -5
- distributed/modal_image.py +0 -11
- distributed/queues.py +0 -5
- distributed/tasks/queued_add_data_points.py +0 -13
- distributed/tasks/queued_add_edges.py +0 -13
- distributed/tasks/queued_add_nodes.py +0 -13
- distributed/test.py +0 -28
- distributed/utils.py +0 -19
- distributed/workers/data_point_saving_worker.py +0 -93
- distributed/workers/graph_saving_worker.py +0 -104
- /cognee/infrastructure/databases/{graph/memgraph → hybrid/neptune_analytics}/__init__.py +0 -0
- /cognee/infrastructure/{llm → databases/vector/embeddings}/embedding_rate_limiter.py +0 -0
- /cognee/infrastructure/{databases/vector/pinecone → llm/structured_output_framework}/__init__.py +0 -0
- /cognee/infrastructure/llm/{anthropic → structured_output_framework/baml/baml_src}/__init__.py +0 -0
- /cognee/infrastructure/llm/{gemini/__init__.py → structured_output_framework/baml/baml_src/extraction/extract_categories.py} +0 -0
- /cognee/infrastructure/llm/{generic_llm_api → structured_output_framework/baml/baml_src/extraction/knowledge_graph}/__init__.py +0 -0
- /cognee/infrastructure/llm/{ollama → structured_output_framework/litellm_instructor}/__init__.py +0 -0
- /cognee/{modules/data → infrastructure/llm/structured_output_framework/litellm_instructor}/extraction/knowledge_graph/__init__.py +0 -0
- /cognee/{modules/data → infrastructure/llm/structured_output_framework/litellm_instructor}/extraction/texts.json +0 -0
- /cognee/infrastructure/llm/{openai → structured_output_framework/litellm_instructor/llm}/__init__.py +0 -0
- {distributed → cognee/infrastructure/llm/structured_output_framework/litellm_instructor/llm/anthropic}/__init__.py +0 -0
- {distributed/tasks → cognee/infrastructure/llm/structured_output_framework/litellm_instructor/llm/gemini}/__init__.py +0 -0
- /cognee/modules/data/{extraction/knowledge_graph → methods}/add_model_class_to_graph.py +0 -0
- {cognee-0.2.1.dev7.dist-info → cognee-0.2.2.dev1.dist-info}/WHEEL +0 -0
- {cognee-0.2.1.dev7.dist-info → cognee-0.2.2.dev1.dist-info}/licenses/LICENSE +0 -0
- {cognee-0.2.1.dev7.dist-info → cognee-0.2.2.dev1.dist-info}/licenses/NOTICE.md +0 -0
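The hunks below reproduce, in full, the three new test files introduced in this release: cognee/tests/test_graph_visualization_permissions.py (+161 lines), cognee/tests/test_neptune_analytics_graph.py (+309 lines), and cognee/tests/test_neptune_analytics_hybrid.py (+176 lines).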
--- /dev/null
+++ b/cognee/tests/test_graph_visualization_permissions.py
@@ -0,0 +1,161 @@
+import asyncio
+import os
+import pathlib
+
+import pytest
+import pytest_asyncio
+from httpx import ASGITransport, AsyncClient
+
+import cognee
+from cognee.api.client import app
+from cognee.modules.users.methods import create_user, get_default_user
+from cognee.modules.users.permissions.methods import authorized_give_permission_on_datasets
+
+# Use pytest-asyncio to handle all async tests
+pytestmark = pytest.mark.asyncio
+
+
+@pytest.fixture(scope="module")
+def event_loop():
+    """Create an instance of the default event loop for our test module."""
+    policy = asyncio.get_event_loop_policy()
+    loop = policy.new_event_loop()
+    yield loop
+    loop.close()
+
+
+@pytest_asyncio.fixture(scope="module")
+async def client():
+    """Create an async HTTP client for testing"""
+    transport = ASGITransport(app=app)
+    async with AsyncClient(transport=transport, base_url="http://test") as client:
+        yield client
+
+
+@pytest_asyncio.fixture(scope="module")
+async def setup_environment():
+    """
+    Set up a clean environment for the test, creating necessary users and datasets.
+    This fixture runs once before all tests and cleans up afterwards.
+    """
+    # 1. Enable the permissions feature
+    os.environ["ENABLE_BACKEND_ACCESS_CONTROL"] = "True"
+
+    # 2. Set up an independent test directory
+    base_dir = pathlib.Path(__file__).parent
+    cognee.config.data_root_directory(str(base_dir / ".data_storage/test_graph_viz"))
+    cognee.config.system_root_directory(str(base_dir / ".cognee_system/test_graph_viz"))
+
+    # 3. Clean up old data
+    await cognee.prune.prune_data()
+    await cognee.prune.prune_system(metadata=True)
+
+    # 4. Add a document for the default user
+    explanation_file_path = os.path.join(base_dir, "test_data/Natural_language_processing.txt")
+    await cognee.add([explanation_file_path], dataset_name="NLP")
+    default_user = await get_default_user()
+    nlp_cognify_result = await cognee.cognify(["NLP"], user=default_user)
+
+    def extract_dataset_id_from_cognify(cognify_result):
+        """Extract the dataset_id from the cognify output dictionary"""
+        for dataset_id, pipeline_result in cognify_result.items():
+            return dataset_id
+        return None
+
+    dataset_id = extract_dataset_id_from_cognify(nlp_cognify_result)
+
+    yield dataset_id
+
+    # 5. Clean up data after the tests are finished
+    await cognee.prune.prune_data()
+    await cognee.prune.prune_system(metadata=True)
+
+
+async def get_authentication_headers(client: AsyncClient, email: str, password: str) -> dict:
+    """Authenticates and returns the Authorization header."""
+    login_data = {"username": email, "password": password}
+    response = await client.post("/api/v1/auth/login", data=login_data, timeout=15)
+
+    assert response.status_code == 200, "Failed to log in and get token"
+
+    token_data = response.json()
+    access_token = token_data["access_token"]
+
+    return {"Authorization": f"Bearer {access_token}"}
+
+
+async def test_owner_can_access_graph(client: AsyncClient, setup_environment: int):
+    """
+    Test Case 1: The dataset owner should be able to access the graph data successfully.
+    """
+    dataset_id = setup_environment
+    default_user_email = "default_user@example.com"
+    default_user_password = "default_password"
+
+    response = await client.get(
+        f"/api/v1/datasets/{dataset_id}/graph",
+        headers=await get_authentication_headers(client, default_user_email, default_user_password),
+    )
+    assert response.status_code == 200, (
+        f"Owner failed to get the knowledge graph visualization. Response: {response.json()}"
+    )
+    data = response.json()
+    assert len(data) > 1, "The graph data is not valid."
+
+    print("✅ Owner can access the graph visualization successfully.")
+
+
+async def test_granting_permission_enables_access(client: AsyncClient, setup_environment: int):
+    """
+    Test Case 2: A user without any permissions should be denied access (403 Forbidden).
+    Once permission is granted, the user should be able to access the graph data.
+    """
+    dataset_id = setup_environment
+    # Create a user without any permissions on the dataset
+    test_user_email = "test_user@example.com"
+    test_user_password = "test_password"
+    test_user = await create_user(test_user_email, test_user_password)
+
+    # Test access to the graph visualization for the test user without any permissions
+    response = await client.get(
+        f"/api/v1/datasets/{dataset_id}/graph",
+        headers=await get_authentication_headers(client, test_user_email, test_user_password),
+    )
+    assert response.status_code == 403, (
+        "Access to graph visualization should be denied without READ permission."
+    )
+    assert (
+        response.json()["detail"]
+        == "Request owner does not have necessary permission: [read] for all datasets requested. [PermissionDeniedError]"
+    )
+    print("✅ Access to graph visualization should be denied without READ permission.")
+
+    # Grant permission to the test user
+    default_user = await get_default_user()
+    await authorized_give_permission_on_datasets(
+        test_user.id, [dataset_id], "read", default_user.id
+    )
+
+    # Test access to the graph visualization for the test user
+    response_for_test_user = await client.get(
+        f"/api/v1/datasets/{dataset_id}/graph",
+        headers=await get_authentication_headers(client, test_user_email, test_user_password),
+    )
+    assert response_for_test_user.status_code == 200, (
+        "Access to graph visualization should succeed for a user who has been granted read permission"
+    )
+    print(
+        "✅ Access to graph visualization should succeed for a user who has been granted read permission"
+    )
+
+    # Test that the graph data is the same for the test user and the default user
+    default_user_email = "default_user@example.com"
+    default_user_password = "default_password"
+    response_for_default_user = await client.get(
+        f"/api/v1/datasets/{dataset_id}/graph",
+        headers=await get_authentication_headers(client, default_user_email, default_user_password),
+    )
+    assert response_for_test_user.json() == response_for_default_user.json(), (
+        "The graph data for the test user and the default user is not the same."
+    )
+    print("✅ The graph data for the test user and the default user is the same.")
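Unlike the two Neptune scripts that follow, this test file has no __main__ entry point and relies on module-scoped fixtures, so it is meant to run under pytest. A minimal sketch of a local runner, assuming pytest and pytest-asyncio are installed (the runner and the -v flag are illustrative, not part of the release):

# Hypothetical local runner for the permissions suite; not part of the package.
import pytest

if __name__ == "__main__":
    # pytest.main accepts the same arguments as the CLI and returns an exit code.
    raise SystemExit(pytest.main(["-v", "cognee/tests/test_graph_visualization_permissions.py"]))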
--- /dev/null
+++ b/cognee/tests/test_neptune_analytics_graph.py
@@ -0,0 +1,309 @@
+import os
+from dotenv import load_dotenv
+import asyncio
+from cognee.infrastructure.databases.graph.neptune_driver import NeptuneGraphDB
+from cognee.modules.chunking.models import DocumentChunk
+from cognee.modules.engine.models import Entity, EntityType
+from cognee.modules.data.processing.document_types import TextDocument
+
+# Set up Amazon credentials in a .env file and get the values from environment variables
+load_dotenv()
+graph_id = os.getenv("GRAPH_ID", "")
+
+na_adapter = NeptuneGraphDB(graph_id)
+
+
+def setup():
+    # Define nodes data before the main function
+    # These nodes were defined using OpenAI from the following prompt:
+
+    # Neptune Analytics is an ideal choice for investigatory, exploratory, or data-science workloads
+    # that require fast iteration for data, analytical and algorithmic processing, or vector search on graph data. It
+    # complements Amazon Neptune Database, a popular managed graph database. To perform intensive analysis, you can load
+    # the data from a Neptune Database graph or snapshot into Neptune Analytics. You can also load graph data that's
+    # stored in Amazon S3.
+
+    document = TextDocument(
+        name="text_test.txt",
+        raw_data_location="git/cognee/examples/database_examples/data_storage/data/text_test.txt",
+        external_metadata="{}",
+        mime_type="text/plain",
+    )
+    document_chunk = DocumentChunk(
+        text="Neptune Analytics is an ideal choice for investigatory, exploratory, or data-science workloads \n that require fast iteration for data, analytical and algorithmic processing, or vector search on graph data. It \n complements Amazon Neptune Database, a popular managed graph database. To perform intensive analysis, you can load \n the data from a Neptune Database graph or snapshot into Neptune Analytics. You can also load graph data that's \n stored in Amazon S3.\n ",
+        chunk_size=187,
+        chunk_index=0,
+        cut_type="paragraph_end",
+        is_part_of=document,
+    )
+
+    graph_database = EntityType(name="graph database", description="graph database")
+    neptune_analytics_entity = Entity(
+        name="neptune analytics",
+        description="A memory-optimized graph database engine for analytics that processes large amounts of graph data quickly.",
+    )
+    neptune_database_entity = Entity(
+        name="amazon neptune database",
+        description="A popular managed graph database that complements Neptune Analytics.",
+    )
+
+    storage = EntityType(name="storage", description="storage")
+    storage_entity = Entity(
+        name="amazon s3",
+        description="A storage service provided by Amazon Web Services that allows storing graph data.",
+    )
+
+    nodes_data = [
+        document,
+        document_chunk,
+        graph_database,
+        neptune_analytics_entity,
+        neptune_database_entity,
+        storage,
+        storage_entity,
+    ]
+
+    edges_data = [
+        (
+            str(document_chunk.id),
+            str(storage_entity.id),
+            "contains",
+        ),
+        (
+            str(storage_entity.id),
+            str(storage.id),
+            "is_a",
+        ),
+        (
+            str(document_chunk.id),
+            str(neptune_database_entity.id),
+            "contains",
+        ),
+        (
+            str(neptune_database_entity.id),
+            str(graph_database.id),
+            "is_a",
+        ),
+        (
+            str(document_chunk.id),
+            str(document.id),
+            "is_part_of",
+        ),
+        (
+            str(document_chunk.id),
+            str(neptune_analytics_entity.id),
+            "contains",
+        ),
+        (
+            str(neptune_analytics_entity.id),
+            str(graph_database.id),
+            "is_a",
+        ),
+    ]
+
+    return nodes_data, edges_data
+
+
+async def pipeline_method():
+    """
+    Example script using Neptune Analytics with small sample data.
+
+    This example demonstrates how to add nodes to Neptune Analytics.
+    """
+
+    print("------TRUNCATE GRAPH-------")
+    await na_adapter.delete_graph()
+
+    print("------SETUP DATA-------")
+    nodes, edges = setup()
+
+    print("------ADD NODES-------")
+    await na_adapter.add_node(nodes[0])
+    await na_adapter.add_nodes(nodes[1:])
+
+    print("------GET NODES FROM DATA-------")
+    node_ids = [str(node.id) for node in nodes]
+    db_nodes = await na_adapter.get_nodes(node_ids)
+
+    print("------RESULTS:-------")
+    for n in db_nodes:
+        print(n)
+
+    print("------ADD EDGES-------")
+    await na_adapter.add_edge(edges[0][0], edges[0][1], edges[0][2])
+    await na_adapter.add_edges(edges[1:])
+
+    print("------HAS EDGES-------")
+    has_edge = await na_adapter.has_edge(
+        edges[0][0],
+        edges[0][1],
+        edges[0][2],
+    )
+    if has_edge:
+        print(f"found edge ({edges[0][0]})-[{edges[0][2]}]->({edges[0][1]})")
+
+    has_edges = await na_adapter.has_edges(edges)
+    if len(has_edges) > 0:
+        print(f"found edges: {len(has_edges)} (expected: {len(edges)})")
+    else:
+        print(f"no edges found (expected: {len(edges)})")
+
+    print("------GET GRAPH-------")
+    all_nodes, all_edges = await na_adapter.get_graph_data()
+    print(f"found {len(all_nodes)} nodes and found {len(all_edges)} edges")
+
+    print("------NEIGHBORING NODES-------")
+    center_node = nodes[2]
+    neighbors = await na_adapter.get_neighbors(str(center_node.id))
+    print(f'found {len(neighbors)} neighbors for node "{center_node.name}"')
+    for neighbor in neighbors:
+        print(neighbor)
+
+    print("------NEIGHBORING EDGES-------")
+    center_node = nodes[2]
+    neighbouring_edges = await na_adapter.get_edges(str(center_node.id))
+    print(f'found {len(neighbouring_edges)} edges neighbouring node "{center_node.name}"')
+    for edge in neighbouring_edges:
+        print(edge)
+
+    print("------GET CONNECTIONS (SOURCE NODE)-------")
+    document_chunk_node = nodes[0]
+    connections = await na_adapter.get_connections(str(document_chunk_node.id))
+    print(f'found {len(connections)} connections for node "{document_chunk_node.type}"')
+    for connection in connections:
+        src, relationship, tgt = connection
+        src = src.get("name", src.get("type", "unknown"))
+        relationship = relationship["relationship_name"]
+        tgt = tgt.get("name", tgt.get("type", "unknown"))
+        print(f'"{src}"-[{relationship}]->"{tgt}"')
+
+    print("------GET CONNECTIONS (TARGET NODE)-------")
+    connections = await na_adapter.get_connections(str(center_node.id))
+    print(f'found {len(connections)} connections for node "{center_node.name}"')
+    for connection in connections:
+        src, relationship, tgt = connection
+        src = src.get("name", src.get("type", "unknown"))
+        relationship = relationship["relationship_name"]
+        tgt = tgt.get("name", tgt.get("type", "unknown"))
+        print(f'"{src}"-[{relationship}]->"{tgt}"')
+
+    print("------SUBGRAPH-------")
+    node_names = ["neptune analytics", "amazon neptune database"]
+    subgraph_nodes, subgraph_edges = await na_adapter.get_nodeset_subgraph(Entity, node_names)
+    print(
+        f"found {len(subgraph_nodes)} nodes and {len(subgraph_edges)} edges in the subgraph around {node_names}"
+    )
+    for subgraph_node in subgraph_nodes:
+        print(subgraph_node)
+    for subgraph_edge in subgraph_edges:
+        print(subgraph_edge)
+
+    print("------STAT-------")
+    stat = await na_adapter.get_graph_metrics(include_optional=True)
+    assert type(stat) is dict
+    assert stat["num_nodes"] == 7
+    assert stat["num_edges"] == 7
+    assert stat["mean_degree"] == 2.0
+    assert round(stat["edge_density"], 3) == 0.167
+    assert stat["num_connected_components"] == [7]
+    assert stat["sizes_of_connected_components"] == 1
+    assert stat["num_selfloops"] == 0
+    # Unsupported optional metrics
+    assert stat["diameter"] == -1
+    assert stat["avg_shortest_path_length"] == -1
+    assert stat["avg_clustering"] == -1
+
+    print("------DELETE-------")
+    # delete all nodes and edges:
+    await na_adapter.delete_graph()
+
+    # delete all nodes by node id
+    # node_ids = [str(node.id) for node in nodes]
+    # await na_adapter.delete_nodes(node_ids)
+
+    has_edges = await na_adapter.has_edges(edges)
+    if len(has_edges) == 0:
+        print("Delete successful")
+    else:
+        print("Delete failed")
+
+
+async def misc_methods():
+    print("------TRUNCATE GRAPH-------")
+    await na_adapter.delete_graph()
+
+    print("------SETUP TEST ENV-------")
+    nodes, edges = setup()
+    await na_adapter.add_nodes(nodes)
+    await na_adapter.add_edges(edges)
+
+    print("------GET GRAPH-------")
+    all_nodes, all_edges = await na_adapter.get_graph_data()
+    print(f"found {len(all_nodes)} nodes and found {len(all_edges)} edges")
+
+    print("------GET DISCONNECTED-------")
+    nodes_disconnected = await na_adapter.get_disconnected_nodes()
+    print(nodes_disconnected)
+    assert len(nodes_disconnected) == 0
+
+    print("------Get Labels (Node)-------")
+    node_labels = await na_adapter.get_node_labels_string()
+    print(node_labels)
+
+    print("------Get Labels (Edge)-------")
+    edge_labels = await na_adapter.get_relationship_labels_string()
+    print(edge_labels)
+
+    print("------Get Filtered Graph-------")
+    filtered_nodes, filtered_edges = await na_adapter.get_filtered_graph_data(
+        [{"name": ["text_test.txt"]}]
+    )
+    print(filtered_nodes, filtered_edges)
+
+    print("------Get Degree one nodes-------")
+    degree_one_nodes = await na_adapter.get_degree_one_nodes("EntityType")
+    print(degree_one_nodes)
+
+    print("------Get Doc sub-graph-------")
+    doc_sub_graph = await na_adapter.get_document_subgraph("test.txt")
+    print(doc_sub_graph)
+
+    print("------Fetch and Remove connections (Predecessors)-------")
+    # Fetch test edge
+    (src_id, dest_id, relationship) = edges[0]
+    nodes_predecessors = await na_adapter.get_predecessors(node_id=dest_id, edge_label=relationship)
+    assert len(nodes_predecessors) > 0
+
+    await na_adapter.remove_connection_to_predecessors_of(
+        node_ids=[src_id], edge_label=relationship
+    )
+    nodes_predecessors_after = await na_adapter.get_predecessors(
+        node_id=dest_id, edge_label=relationship
+    )
+    # Returns empty after the relationship has been deleted.
+    assert len(nodes_predecessors_after) == 0
+
+    print("------Fetch and Remove connections (Successors)-------")
+    _, edges_suc = await na_adapter.get_graph_data()
+    (src_id, dest_id, relationship, _) = edges_suc[0]
+
+    nodes_successors = await na_adapter.get_successors(node_id=src_id, edge_label=relationship)
+    assert len(nodes_successors) > 0
+
+    await na_adapter.remove_connection_to_successors_of(node_ids=[dest_id], edge_label=relationship)
+    nodes_successors_after = await na_adapter.get_successors(
+        node_id=src_id, edge_label=relationship
+    )
+    assert len(nodes_successors_after) == 0
+
+    # no-op
+    await na_adapter.project_entire_graph()
+    await na_adapter.drop_graph()
+    await na_adapter.graph_exists()
+
+    pass
+
+
+if __name__ == "__main__":
+    asyncio.run(pipeline_method())
+    asyncio.run(misc_methods())
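Both Neptune scripts read their target graph from the environment rather than from test configuration. A minimal pre-flight sketch of the expected setup, assuming AWS credentials are resolvable through the SDK's default chain (the check below is illustrative, not part of the release):

# Hypothetical pre-flight check mirroring the scripts' use of python-dotenv.
import os
from dotenv import load_dotenv

load_dotenv()  # pulls GRAPH_ID (and any AWS_* variables) from a local .env file
if not os.getenv("GRAPH_ID"):
    raise RuntimeError("GRAPH_ID must name an existing Neptune Analytics graph")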
--- /dev/null
+++ b/cognee/tests/test_neptune_analytics_hybrid.py
@@ -0,0 +1,176 @@
+import os
+from dotenv import load_dotenv
+import asyncio
+import pytest
+
+from cognee.modules.chunking.models import DocumentChunk
+from cognee.modules.engine.models import Entity, EntityType
+from cognee.modules.data.processing.document_types import TextDocument
+from cognee.infrastructure.databases.vector.embeddings import get_embedding_engine
+from cognee.shared.logging_utils import get_logger
+from cognee.infrastructure.databases.hybrid.neptune_analytics.NeptuneAnalyticsAdapter import (
+    NeptuneAnalyticsAdapter,
+)
+
+# Set up Amazon credentials in a .env file and get the values from environment variables
+load_dotenv()
+graph_id = os.getenv("GRAPH_ID", "")
+
+# get the default embedder
+embedding_engine = get_embedding_engine()
+na_graph = NeptuneAnalyticsAdapter(graph_id)
+na_vector = NeptuneAnalyticsAdapter(graph_id, embedding_engine)
+
+collection = "test_collection"
+
+logger = get_logger("test_neptune_analytics_hybrid")
+
+
+def setup_data():
+    # Define nodes data before the main function
+    # These nodes were defined using OpenAI from the following prompt:
+    #
+    # Neptune Analytics is an ideal choice for investigatory, exploratory, or data-science workloads
+    # that require fast iteration for data, analytical and algorithmic processing, or vector search on graph data. It
+    # complements Amazon Neptune Database, a popular managed graph database. To perform intensive analysis, you can load
+    # the data from a Neptune Database graph or snapshot into Neptune Analytics. You can also load graph data that's
+    # stored in Amazon S3.
+
+    document = TextDocument(
+        name="text.txt",
+        raw_data_location="git/cognee/examples/database_examples/data_storage/data/text.txt",
+        external_metadata="{}",
+        mime_type="text/plain",
+    )
+    document_chunk = DocumentChunk(
+        text="Neptune Analytics is an ideal choice for investigatory, exploratory, or data-science workloads \n that require fast iteration for data, analytical and algorithmic processing, or vector search on graph data. It \n complements Amazon Neptune Database, a popular managed graph database. To perform intensive analysis, you can load \n the data from a Neptune Database graph or snapshot into Neptune Analytics. You can also load graph data that's \n stored in Amazon S3.\n ",
+        chunk_size=187,
+        chunk_index=0,
+        cut_type="paragraph_end",
+        is_part_of=document,
+    )
+
+    graph_database = EntityType(name="graph database", description="graph database")
+    neptune_analytics_entity = Entity(
+        name="neptune analytics",
+        description="A memory-optimized graph database engine for analytics that processes large amounts of graph data quickly.",
+    )
+    neptune_database_entity = Entity(
+        name="amazon neptune database",
+        description="A popular managed graph database that complements Neptune Analytics.",
+    )
+
+    storage = EntityType(name="storage", description="storage")
+    storage_entity = Entity(
+        name="amazon s3",
+        description="A storage service provided by Amazon Web Services that allows storing graph data.",
+    )
+
+    nodes_data = [
+        document,
+        document_chunk,
+        graph_database,
+        neptune_analytics_entity,
+        neptune_database_entity,
+        storage,
+        storage_entity,
+    ]
+
+    edges_data = [
+        (
+            str(document_chunk.id),
+            str(storage_entity.id),
+            "contains",
+        ),
+        (
+            str(storage_entity.id),
+            str(storage.id),
+            "is_a",
+        ),
+        (
+            str(document_chunk.id),
+            str(neptune_database_entity.id),
+            "contains",
+        ),
+        (
+            str(neptune_database_entity.id),
+            str(graph_database.id),
+            "is_a",
+        ),
+        (
+            str(document_chunk.id),
+            str(document.id),
+            "is_part_of",
+        ),
+        (
+            str(document_chunk.id),
+            str(neptune_analytics_entity.id),
+            "contains",
+        ),
+        (
+            str(neptune_analytics_entity.id),
+            str(graph_database.id),
+            "is_a",
+        ),
+    ]
+    return nodes_data, edges_data
+
+
+async def test_add_graph_then_vector_data():
+    logger.info("------test_add_graph_then_vector_data-------")
+    (nodes, edges) = setup_data()
+    await na_graph.add_nodes(nodes)
+    await na_graph.add_edges(edges)
+    await na_vector.create_data_points(collection, nodes)
+
+    node_ids = [str(node.id) for node in nodes]
+    retrieved_data_points = await na_vector.retrieve(collection, node_ids)
+    retrieved_nodes = await na_graph.get_nodes(node_ids)
+
+    assert len(retrieved_data_points) == len(retrieved_nodes) == len(node_ids)
+
+    # delete all nodes and edges and vectors:
+    await na_graph.delete_graph()
+    await na_vector.prune()
+
+    (nodes, edges) = await na_graph.get_graph_data()
+    assert len(nodes) == 0
+    assert len(edges) == 0
+    logger.info("------PASSED-------")
+
+
+async def test_add_vector_then_node_data():
+    logger.info("------test_add_vector_then_node_data-------")
+    (nodes, edges) = setup_data()
+    await na_vector.create_data_points(collection, nodes)
+    await na_graph.add_nodes(nodes)
+    await na_graph.add_edges(edges)
+
+    node_ids = [str(node.id) for node in nodes]
+    retrieved_data_points = await na_vector.retrieve(collection, node_ids)
+    retrieved_nodes = await na_graph.get_nodes(node_ids)
+
+    assert len(retrieved_data_points) == len(retrieved_nodes) == len(node_ids)
+
+    # delete all nodes and edges and vectors:
+    await na_vector.prune()
+    await na_graph.delete_graph()
+
+    (nodes, edges) = await na_graph.get_graph_data()
+    assert len(nodes) == 0
+    assert len(edges) == 0
+    logger.info("------PASSED-------")
+
+
+def main():
+    """
+    Example script using Neptune Analytics as the graph and vector (hybrid) store with small sample data.
+    This example demonstrates how to add nodes and vectors to Neptune Analytics, and ensures that
+    the nodes do not conflict.
+    """
+    asyncio.run(test_add_graph_then_vector_data())
+    asyncio.run(test_add_vector_then_node_data())
+
+
+if __name__ == "__main__":
+    main()
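Because na_graph and na_vector are two NeptuneAnalyticsAdapter instances backed by the same Neptune Analytics graph, exercising both insertion orders (graph-first, then vector-first) checks that node writes and vector data-point writes land in the one shared store without clobbering each other, which is the "nodes do not conflict" property named in the docstring.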