graphiti-core 0.18.9__tar.gz → 0.19.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of graphiti-core might be problematic. Click here for more details.
- graphiti_core-0.19.0/.github/workflows/ai-moderator.yml +30 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/.github/workflows/cla.yml +1 -1
- graphiti_core-0.19.0/.github/workflows/claude-code-review.yml +50 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/.github/workflows/claude.yml +1 -1
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/.github/workflows/unit_tests.yml +1 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/PKG-INFO +87 -13
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/README.md +78 -11
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/conftest.py +4 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/examples/quickstart/README.md +16 -1
- graphiti_core-0.19.0/examples/quickstart/quickstart_neptune.py +252 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/graphiti_core/driver/driver.py +4 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/graphiti_core/driver/falkordb_driver.py +3 -14
- graphiti_core-0.19.0/graphiti_core/driver/kuzu_driver.py +175 -0
- graphiti_core-0.19.0/graphiti_core/driver/neptune_driver.py +301 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/graphiti_core/edges.py +155 -62
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/graphiti_core/graph_queries.py +31 -2
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/graphiti_core/graphiti.py +6 -1
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/graphiti_core/helpers.py +8 -8
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/graphiti_core/llm_client/config.py +1 -1
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/graphiti_core/llm_client/openai_base_client.py +12 -2
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/graphiti_core/llm_client/openai_client.py +10 -2
- graphiti_core-0.19.0/graphiti_core/migrations/neo4j_node_group_labels.py +114 -0
- graphiti_core-0.19.0/graphiti_core/models/edges/edge_db_queries.py +259 -0
- graphiti_core-0.19.0/graphiti_core/models/nodes/node_db_queries.py +312 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/graphiti_core/nodes.py +271 -98
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/graphiti_core/search/search.py +42 -12
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/graphiti_core/search/search_config.py +4 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/graphiti_core/search/search_filters.py +35 -22
- graphiti_core-0.19.0/graphiti_core/search/search_utils.py +2016 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/graphiti_core/utils/bulk_utils.py +50 -15
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/graphiti_core/utils/datetime_utils.py +13 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/graphiti_core/utils/maintenance/community_operations.py +39 -32
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/graphiti_core/utils/maintenance/edge_operations.py +47 -13
- graphiti_core-0.19.0/graphiti_core/utils/maintenance/graph_data_operations.py +220 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/pyproject.toml +6 -7
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/pytest.ini +2 -1
- graphiti_core-0.19.0/server/graph_service/routers/__init__.py +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/signatures/version1/cla.json +32 -0
- graphiti_core-0.19.0/tests/helpers_test.py +313 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/tests/test_edge_int.py +121 -108
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/tests/test_entity_exclusion_int.py +0 -5
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/tests/test_graphiti_int.py +6 -9
- graphiti_core-0.19.0/tests/test_graphiti_mock.py +2068 -0
- graphiti_core-0.19.0/tests/test_node_int.py +229 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/uv.lock +200 -16
- graphiti_core-0.18.9/.github/workflows/claude-code-review.yml +0 -78
- graphiti_core-0.18.9/graphiti_core/models/edges/edge_db_queries.py +0 -130
- graphiti_core-0.18.9/graphiti_core/models/nodes/node_db_queries.py +0 -133
- graphiti_core-0.18.9/graphiti_core/search/search_utils.py +0 -1079
- graphiti_core-0.18.9/graphiti_core/utils/maintenance/graph_data_operations.py +0 -135
- graphiti_core-0.18.9/tests/helpers_test.py +0 -96
- graphiti_core-0.18.9/tests/test_node_int.py +0 -243
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/.env.example +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/.github/ISSUE_TEMPLATE/bug_report.md +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/.github/dependabot.yml +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/.github/pull_request_template.md +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/.github/secret_scanning.yml +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/.github/workflows/codeql.yml +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/.github/workflows/lint.yml +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/.github/workflows/mcp-server-docker.yml +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/.github/workflows/release-graphiti-core.yml +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/.github/workflows/typecheck.yml +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/.gitignore +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/CLAUDE.md +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/CODE_OF_CONDUCT.md +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/CONTRIBUTING.md +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/Dockerfile +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/LICENSE +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/Makefile +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/SECURITY.md +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/Zep-CLA.md +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/depot.json +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/docker-compose.test.yml +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/docker-compose.yml +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/ellipsis.yaml +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/examples/data/manybirds_products.json +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/examples/ecommerce/runner.ipynb +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/examples/ecommerce/runner.py +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/examples/langgraph-agent/agent.ipynb +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/examples/langgraph-agent/tinybirds-jess.png +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/examples/podcast/podcast_runner.py +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/examples/podcast/podcast_transcript.txt +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/examples/podcast/transcript_parser.py +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/examples/quickstart/quickstart_falkordb.py +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/examples/quickstart/quickstart_neo4j.py +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/examples/quickstart/requirements.txt +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/examples/wizard_of_oz/parser.py +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/examples/wizard_of_oz/runner.py +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/examples/wizard_of_oz/woo.txt +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/graphiti_core/__init__.py +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/graphiti_core/cross_encoder/__init__.py +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/graphiti_core/cross_encoder/bge_reranker_client.py +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/graphiti_core/cross_encoder/client.py +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/graphiti_core/cross_encoder/gemini_reranker_client.py +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/graphiti_core/cross_encoder/openai_reranker_client.py +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/graphiti_core/driver/__init__.py +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/graphiti_core/driver/neo4j_driver.py +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/graphiti_core/embedder/__init__.py +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/graphiti_core/embedder/azure_openai.py +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/graphiti_core/embedder/client.py +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/graphiti_core/embedder/gemini.py +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/graphiti_core/embedder/openai.py +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/graphiti_core/embedder/voyage.py +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/graphiti_core/errors.py +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/graphiti_core/graphiti_types.py +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/graphiti_core/llm_client/__init__.py +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/graphiti_core/llm_client/anthropic_client.py +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/graphiti_core/llm_client/azure_openai_client.py +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/graphiti_core/llm_client/client.py +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/graphiti_core/llm_client/errors.py +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/graphiti_core/llm_client/gemini_client.py +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/graphiti_core/llm_client/groq_client.py +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/graphiti_core/llm_client/openai_generic_client.py +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/graphiti_core/llm_client/utils.py +0 -0
- {graphiti_core-0.18.9/graphiti_core/models → graphiti_core-0.19.0/graphiti_core/migrations}/__init__.py +0 -0
- {graphiti_core-0.18.9/graphiti_core/models/edges → graphiti_core-0.19.0/graphiti_core/models}/__init__.py +0 -0
- {graphiti_core-0.18.9/graphiti_core/models/nodes → graphiti_core-0.19.0/graphiti_core/models/edges}/__init__.py +0 -0
- {graphiti_core-0.18.9/graphiti_core/search → graphiti_core-0.19.0/graphiti_core/models/nodes}/__init__.py +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/graphiti_core/prompts/__init__.py +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/graphiti_core/prompts/dedupe_edges.py +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/graphiti_core/prompts/dedupe_nodes.py +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/graphiti_core/prompts/eval.py +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/graphiti_core/prompts/extract_edge_dates.py +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/graphiti_core/prompts/extract_edges.py +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/graphiti_core/prompts/extract_nodes.py +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/graphiti_core/prompts/invalidate_edges.py +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/graphiti_core/prompts/lib.py +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/graphiti_core/prompts/models.py +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/graphiti_core/prompts/prompt_helpers.py +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/graphiti_core/prompts/summarize_nodes.py +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/graphiti_core/py.typed +0 -0
- {graphiti_core-0.18.9/graphiti_core/utils → graphiti_core-0.19.0/graphiti_core/search}/__init__.py +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/graphiti_core/search/search_config_recipes.py +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/graphiti_core/search/search_helpers.py +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/graphiti_core/telemetry/__init__.py +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/graphiti_core/telemetry/telemetry.py +0 -0
- {graphiti_core-0.18.9/server/graph_service → graphiti_core-0.19.0/graphiti_core/utils}/__init__.py +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/graphiti_core/utils/maintenance/__init__.py +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/graphiti_core/utils/maintenance/node_operations.py +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/graphiti_core/utils/maintenance/temporal_operations.py +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/graphiti_core/utils/maintenance/utils.py +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/graphiti_core/utils/ontology_utils/entity_types_utils.py +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/images/arxiv-screenshot.png +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/images/graphiti-graph-intro.gif +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/images/graphiti-intro-slides-stock-2.gif +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/images/simple_graph.svg +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/mcp_server/.env.example +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/mcp_server/.python-version +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/mcp_server/Dockerfile +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/mcp_server/README.md +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/mcp_server/cursor_rules.md +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/mcp_server/docker-compose.yml +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/mcp_server/graphiti_mcp_server.py +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/mcp_server/mcp_config_sse_example.json +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/mcp_server/mcp_config_stdio_example.json +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/mcp_server/pyproject.toml +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/mcp_server/uv.lock +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/poetry.lock +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/py.typed +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/server/.env.example +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/server/Makefile +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/server/README.md +0 -0
- {graphiti_core-0.18.9/server/graph_service/routers → graphiti_core-0.19.0/server/graph_service}/__init__.py +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/server/graph_service/config.py +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/server/graph_service/dto/__init__.py +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/server/graph_service/dto/common.py +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/server/graph_service/dto/ingest.py +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/server/graph_service/dto/retrieve.py +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/server/graph_service/main.py +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/server/graph_service/routers/ingest.py +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/server/graph_service/routers/retrieve.py +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/server/graph_service/zep_graphiti.py +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/server/pyproject.toml +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/server/uv.lock +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/tests/cross_encoder/test_bge_reranker_client.py +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/tests/cross_encoder/test_gemini_reranker_client.py +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/tests/driver/__init__.py +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/tests/driver/test_falkordb_driver.py +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/tests/embedder/embedder_fixtures.py +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/tests/embedder/test_gemini.py +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/tests/embedder/test_openai.py +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/tests/embedder/test_voyage.py +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/tests/evals/data/longmemeval_data/README.md +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/tests/evals/data/longmemeval_data/longmemeval_oracle.json +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/tests/evals/eval_cli.py +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/tests/evals/eval_e2e_graph_building.py +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/tests/evals/pytest.ini +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/tests/evals/utils.py +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/tests/llm_client/test_anthropic_client.py +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/tests/llm_client/test_anthropic_client_int.py +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/tests/llm_client/test_client.py +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/tests/llm_client/test_errors.py +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/tests/llm_client/test_gemini_client.py +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/tests/utils/maintenance/test_edge_operations.py +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/tests/utils/maintenance/test_temporal_operations_int.py +0 -0
- {graphiti_core-0.18.9 → graphiti_core-0.19.0}/tests/utils/search/search_utils_test.py +0 -0
|
@@ -0,0 +1,30 @@
|
|
|
1
|
+
name: AI Moderator
|
|
2
|
+
on:
|
|
3
|
+
issues:
|
|
4
|
+
types: [opened]
|
|
5
|
+
issue_comment:
|
|
6
|
+
types: [created]
|
|
7
|
+
pull_request_review_comment:
|
|
8
|
+
types: [created]
|
|
9
|
+
|
|
10
|
+
jobs:
|
|
11
|
+
spam-detection:
|
|
12
|
+
runs-on: ubuntu-latest
|
|
13
|
+
permissions:
|
|
14
|
+
issues: write
|
|
15
|
+
pull-requests: write
|
|
16
|
+
models: read
|
|
17
|
+
contents: read
|
|
18
|
+
steps:
|
|
19
|
+
- uses: actions/checkout@v4
|
|
20
|
+
- uses: github/ai-moderator@v1
|
|
21
|
+
with:
|
|
22
|
+
token: ${{ secrets.GITHUB_TOKEN }}
|
|
23
|
+
spam-label: 'spam'
|
|
24
|
+
ai-label: 'ai-generated'
|
|
25
|
+
minimize-detected-comments: true
|
|
26
|
+
# Built-in prompt configuration (all enabled by default)
|
|
27
|
+
enable-spam-detection: true
|
|
28
|
+
enable-link-spam-detection: true
|
|
29
|
+
enable-ai-detection: true
|
|
30
|
+
# custom-prompt-path: '.github/prompts/my-custom.prompt.yml' # Optional
|
|
@@ -28,7 +28,7 @@ jobs:
|
|
|
28
28
|
path-to-document: "https://github.com/getzep/graphiti/blob/main/Zep-CLA.md" # e.g. a CLA or a DCO document
|
|
29
29
|
# branch should not be protected unless a personal PAT is used
|
|
30
30
|
branch: "main"
|
|
31
|
-
allowlist: paul-paliychuk,prasmussen15,danielchalef,dependabot[bot],ellipsis-dev,Claude[bot]
|
|
31
|
+
allowlist: paul-paliychuk,prasmussen15,danielchalef,dependabot[bot],ellipsis-dev,Claude[bot],claude[bot]
|
|
32
32
|
|
|
33
33
|
# the followings are the optional inputs - If the optional inputs are not given, then default values will be taken
|
|
34
34
|
#remote-organization-name: enter the remote organization name where the signatures should be stored (Default is storing the signatures in the same repository)
|
|
@@ -0,0 +1,50 @@
|
|
|
1
|
+
name: Claude PR Auto Review
|
|
2
|
+
|
|
3
|
+
on:
|
|
4
|
+
pull_request:
|
|
5
|
+
types: [opened, synchronize]
|
|
6
|
+
|
|
7
|
+
jobs:
|
|
8
|
+
auto-review:
|
|
9
|
+
runs-on: ubuntu-latest
|
|
10
|
+
permissions:
|
|
11
|
+
contents: read
|
|
12
|
+
pull-requests: write
|
|
13
|
+
id-token: write
|
|
14
|
+
steps:
|
|
15
|
+
- name: Checkout repository
|
|
16
|
+
uses: actions/checkout@v4
|
|
17
|
+
with:
|
|
18
|
+
fetch-depth: 1
|
|
19
|
+
|
|
20
|
+
- name: Automatic PR Review
|
|
21
|
+
uses: anthropics/claude-code-action@v1
|
|
22
|
+
with:
|
|
23
|
+
anthropic_api_key: ${{ secrets.ANTHROPIC_API_KEY }}
|
|
24
|
+
use_sticky_comment: true
|
|
25
|
+
allowed_bots: "dependabot"
|
|
26
|
+
prompt: |
|
|
27
|
+
REPO: ${{ github.repository }}
|
|
28
|
+
PR NUMBER: ${{ github.event.pull_request.number }}
|
|
29
|
+
|
|
30
|
+
Please review this pull request.
|
|
31
|
+
|
|
32
|
+
Note: The PR branch is already checked out in the current working directory.
|
|
33
|
+
|
|
34
|
+
Focus on:
|
|
35
|
+
- Code quality and best practices
|
|
36
|
+
- Potential bugs or issues
|
|
37
|
+
- Performance considerations
|
|
38
|
+
- Security implications
|
|
39
|
+
- Test coverage
|
|
40
|
+
- Documentation updates if needed
|
|
41
|
+
- Verify that README.md and docs are updated for any new features or config changes
|
|
42
|
+
|
|
43
|
+
Provide constructive feedback with specific suggestions for improvement.
|
|
44
|
+
Use `gh pr comment:*` for top-level comments.
|
|
45
|
+
Use `mcp__github_inline_comment__create_inline_comment` to highlight specific areas of concern.
|
|
46
|
+
Only your GitHub comments that you post will be seen, so don't submit your review as a normal message, just as comments.
|
|
47
|
+
If the PR has already been reviewed, or there are no noteworthy changes, don't post anything.
|
|
48
|
+
|
|
49
|
+
claude_args: |
|
|
50
|
+
--allowedTools "mcp__github_inline_comment__create_inline_comment,Bash(gh pr comment:*), Bash(gh pr diff:*), Bash(gh pr view:*)"
|
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
Metadata-Version: 2.4
|
|
2
2
|
Name: graphiti-core
|
|
3
|
-
Version: 0.
|
|
3
|
+
Version: 0.19.0
|
|
4
4
|
Summary: A temporal graph building library
|
|
5
5
|
Project-URL: Homepage, https://help.getzep.com/graphiti/graphiti/overview
|
|
6
6
|
Project-URL: Repository, https://github.com/getzep/graphiti
|
|
@@ -26,11 +26,12 @@ Requires-Dist: google-genai>=1.8.0; extra == 'dev'
|
|
|
26
26
|
Requires-Dist: groq>=0.2.0; extra == 'dev'
|
|
27
27
|
Requires-Dist: ipykernel>=6.29.5; extra == 'dev'
|
|
28
28
|
Requires-Dist: jupyterlab>=4.2.4; extra == 'dev'
|
|
29
|
+
Requires-Dist: kuzu>=0.11.2; extra == 'dev'
|
|
29
30
|
Requires-Dist: langchain-anthropic>=0.2.4; extra == 'dev'
|
|
30
31
|
Requires-Dist: langchain-openai>=0.2.6; extra == 'dev'
|
|
31
32
|
Requires-Dist: langgraph>=0.2.15; extra == 'dev'
|
|
32
33
|
Requires-Dist: langsmith>=0.1.108; extra == 'dev'
|
|
33
|
-
Requires-Dist: pyright>=1.1.
|
|
34
|
+
Requires-Dist: pyright>=1.1.404; extra == 'dev'
|
|
34
35
|
Requires-Dist: pytest-asyncio>=0.24.0; extra == 'dev'
|
|
35
36
|
Requires-Dist: pytest-xdist>=3.6.1; extra == 'dev'
|
|
36
37
|
Requires-Dist: pytest>=8.3.3; extra == 'dev'
|
|
@@ -44,6 +45,12 @@ Provides-Extra: google-genai
|
|
|
44
45
|
Requires-Dist: google-genai>=1.8.0; extra == 'google-genai'
|
|
45
46
|
Provides-Extra: groq
|
|
46
47
|
Requires-Dist: groq>=0.2.0; extra == 'groq'
|
|
48
|
+
Provides-Extra: kuzu
|
|
49
|
+
Requires-Dist: kuzu>=0.11.2; extra == 'kuzu'
|
|
50
|
+
Provides-Extra: neptune
|
|
51
|
+
Requires-Dist: boto3>=1.39.16; extra == 'neptune'
|
|
52
|
+
Requires-Dist: langchain-aws>=0.2.29; extra == 'neptune'
|
|
53
|
+
Requires-Dist: opensearch-py>=3.0.0; extra == 'neptune'
|
|
47
54
|
Provides-Extra: sentence-transformers
|
|
48
55
|
Requires-Dist: sentence-transformers>=3.2.1; extra == 'sentence-transformers'
|
|
49
56
|
Provides-Extra: voyageai
|
|
@@ -96,7 +103,7 @@ Use Graphiti to:
|
|
|
96
103
|
<br />
|
|
97
104
|
|
|
98
105
|
<p align="center">
|
|
99
|
-
<img src="images/graphiti-graph-intro.gif" alt="Graphiti temporal walkthrough" width="700px">
|
|
106
|
+
<img src="images/graphiti-graph-intro.gif" alt="Graphiti temporal walkthrough" width="700px">
|
|
100
107
|
</p>
|
|
101
108
|
|
|
102
109
|
<br />
|
|
@@ -132,7 +139,7 @@ Traditional RAG approaches often rely on batch processing and static data summar
|
|
|
132
139
|
- **Scalability:** Efficiently manages large datasets with parallel processing, suitable for enterprise environments.
|
|
133
140
|
|
|
134
141
|
<p align="center">
|
|
135
|
-
<img src="/images/graphiti-intro-slides-stock-2.gif" alt="Graphiti structured + unstructured demo" width="700px">
|
|
142
|
+
<img src="/images/graphiti-intro-slides-stock-2.gif" alt="Graphiti structured + unstructured demo" width="700px">
|
|
136
143
|
</p>
|
|
137
144
|
|
|
138
145
|
## Graphiti vs. GraphRAG
|
|
@@ -157,7 +164,7 @@ Graphiti is specifically designed to address the challenges of dynamic and frequ
|
|
|
157
164
|
Requirements:
|
|
158
165
|
|
|
159
166
|
- Python 3.10 or higher
|
|
160
|
-
- Neo4j 5.26 / FalkorDB 1.1.2 or
|
|
167
|
+
- Neo4j 5.26 / FalkorDB 1.1.2 / Kuzu 0.11.2 / Amazon Neptune Database Cluster or Neptune Analytics Graph + Amazon OpenSearch Serverless collection (serves as the full text search backend)
|
|
161
168
|
- OpenAI API key (Graphiti defaults to OpenAI for LLM inference and embedding)
|
|
162
169
|
|
|
163
170
|
> [!IMPORTANT]
|
|
@@ -200,6 +207,28 @@ pip install graphiti-core[falkordb]
|
|
|
200
207
|
uv add graphiti-core[falkordb]
|
|
201
208
|
```
|
|
202
209
|
|
|
210
|
+
### Installing with Kuzu Support
|
|
211
|
+
|
|
212
|
+
If you plan to use Kuzu as your graph database backend, install with the Kuzu extra:
|
|
213
|
+
|
|
214
|
+
```bash
|
|
215
|
+
pip install graphiti-core[kuzu]
|
|
216
|
+
|
|
217
|
+
# or with uv
|
|
218
|
+
uv add graphiti-core[kuzu]
|
|
219
|
+
```
|
|
220
|
+
|
|
221
|
+
### Installing with Amazon Neptune Support
|
|
222
|
+
|
|
223
|
+
If you plan to use Amazon Neptune as your graph database backend, install with the Amazon Neptune extra:
|
|
224
|
+
|
|
225
|
+
```bash
|
|
226
|
+
pip install graphiti-core[neptune]
|
|
227
|
+
|
|
228
|
+
# or with uv
|
|
229
|
+
uv add graphiti-core[neptune]
|
|
230
|
+
```
|
|
231
|
+
|
|
203
232
|
### You can also install optional LLM providers as extras:
|
|
204
233
|
|
|
205
234
|
```bash
|
|
@@ -217,6 +246,9 @@ pip install graphiti-core[anthropic,groq,google-genai]
|
|
|
217
246
|
|
|
218
247
|
# Install with FalkorDB and LLM providers
|
|
219
248
|
pip install graphiti-core[falkordb,anthropic,google-genai]
|
|
249
|
+
|
|
250
|
+
# Install with Amazon Neptune
|
|
251
|
+
pip install graphiti-core[neptune]
|
|
220
252
|
```
|
|
221
253
|
|
|
222
254
|
## Default to Low Concurrency; LLM Provider 429 Rate Limit Errors
|
|
@@ -236,7 +268,7 @@ If your LLM provider allows higher throughput, you can increase `SEMAPHORE_LIMIT
|
|
|
236
268
|
|
|
237
269
|
For a complete working example, see the [Quickstart Example](./examples/quickstart/README.md) in the examples directory. The quickstart demonstrates:
|
|
238
270
|
|
|
239
|
-
1. Connecting to a Neo4j
|
|
271
|
+
1. Connecting to a Neo4j, Amazon Neptune, FalkorDB, or Kuzu database
|
|
240
272
|
2. Initializing Graphiti indices and constraints
|
|
241
273
|
3. Adding episodes to the graph (both text and structured JSON)
|
|
242
274
|
4. Searching for relationships (edges) using hybrid search
|
|
@@ -319,6 +351,39 @@ driver = FalkorDriver(
|
|
|
319
351
|
graphiti = Graphiti(graph_driver=driver)
|
|
320
352
|
```
|
|
321
353
|
|
|
354
|
+
#### Kuzu
|
|
355
|
+
|
|
356
|
+
```python
|
|
357
|
+
from graphiti_core import Graphiti
|
|
358
|
+
from graphiti_core.driver.kuzu_driver import KuzuDriver
|
|
359
|
+
|
|
360
|
+
# Create a Kuzu driver
|
|
361
|
+
driver = KuzuDriver(db="/tmp/graphiti.kuzu")
|
|
362
|
+
|
|
363
|
+
# Pass the driver to Graphiti
|
|
364
|
+
graphiti = Graphiti(graph_driver=driver)
|
|
365
|
+
```
|
|
366
|
+
|
|
367
|
+
#### Amazon Neptune
|
|
368
|
+
|
|
369
|
+
```python
|
|
370
|
+
from graphiti_core import Graphiti
|
|
371
|
+
from graphiti_core.driver.neptune_driver import NeptuneDriver
|
|
372
|
+
|
|
373
|
+
# Create a FalkorDB driver with custom database name
|
|
374
|
+
driver = NeptuneDriver(
|
|
375
|
+
host=<NEPTUNE ENDPOINT>,
|
|
376
|
+
aoss_host=<Amazon OpenSearch Serverless Host>,
|
|
377
|
+
port=<PORT> # Optional, defaults to 8182,
|
|
378
|
+
aoss_port=<PORT> # Optional, defaults to 443
|
|
379
|
+
)
|
|
380
|
+
|
|
381
|
+
driver = NeptuneDriver(host=neptune_uri, aoss_host=aoss_host, port=neptune_port)
|
|
382
|
+
|
|
383
|
+
# Pass the driver to Graphiti
|
|
384
|
+
graphiti = Graphiti(graph_driver=driver)
|
|
385
|
+
```
|
|
386
|
+
|
|
322
387
|
|
|
323
388
|
### Performance Configuration
|
|
324
389
|
|
|
@@ -331,6 +396,13 @@ as such this feature is off by default.
|
|
|
331
396
|
|
|
332
397
|
Graphiti supports Azure OpenAI for both LLM inference and embeddings. Azure deployments often require different endpoints for LLM and embedding services, and separate deployments for default and small models.
|
|
333
398
|
|
|
399
|
+
> [!IMPORTANT]
|
|
400
|
+
> **Azure OpenAI v1 API Opt-in Required for Structured Outputs**
|
|
401
|
+
>
|
|
402
|
+
> Graphiti uses structured outputs via the `client.beta.chat.completions.parse()` method, which requires Azure OpenAI deployments to opt into the v1 API. Without this opt-in, you'll encounter 404 Resource not found errors during episode ingestion.
|
|
403
|
+
>
|
|
404
|
+
> To enable v1 API support in your Azure OpenAI deployment, follow Microsoft's guide: [Azure OpenAI API version lifecycle](https://learn.microsoft.com/en-us/azure/ai-foundry/openai/api-version-lifecycle?tabs=key#api-evolution).
|
|
405
|
+
|
|
334
406
|
```python
|
|
335
407
|
from openai import AsyncAzureOpenAI
|
|
336
408
|
from graphiti_core import Graphiti
|
|
@@ -449,25 +521,27 @@ The Gemini reranker uses the `gemini-2.5-flash-lite-preview-06-17` model by defa
|
|
|
449
521
|
Graphiti supports Ollama for running local LLMs and embedding models via Ollama's OpenAI-compatible API. This is ideal for privacy-focused applications or when you want to avoid API costs.
|
|
450
522
|
|
|
451
523
|
Install the models:
|
|
524
|
+
```bash
|
|
452
525
|
ollama pull deepseek-r1:7b # LLM
|
|
453
526
|
ollama pull nomic-embed-text # embeddings
|
|
527
|
+
```
|
|
454
528
|
|
|
455
529
|
```python
|
|
456
530
|
from graphiti_core import Graphiti
|
|
457
531
|
from graphiti_core.llm_client.config import LLMConfig
|
|
458
|
-
from graphiti_core.llm_client.
|
|
532
|
+
from graphiti_core.llm_client.openai_generic_client import OpenAIGenericClient
|
|
459
533
|
from graphiti_core.embedder.openai import OpenAIEmbedder, OpenAIEmbedderConfig
|
|
460
534
|
from graphiti_core.cross_encoder.openai_reranker_client import OpenAIRerankerClient
|
|
461
535
|
|
|
462
536
|
# Configure Ollama LLM client
|
|
463
537
|
llm_config = LLMConfig(
|
|
464
|
-
api_key="
|
|
538
|
+
api_key="ollama", # Ollama doesn't require a real API key, but some placeholder is needed
|
|
465
539
|
model="deepseek-r1:7b",
|
|
466
540
|
small_model="deepseek-r1:7b",
|
|
467
|
-
base_url="http://localhost:11434/v1",
|
|
541
|
+
base_url="http://localhost:11434/v1", # Ollama's OpenAI-compatible endpoint
|
|
468
542
|
)
|
|
469
543
|
|
|
470
|
-
llm_client =
|
|
544
|
+
llm_client = OpenAIGenericClient(config=llm_config)
|
|
471
545
|
|
|
472
546
|
# Initialize Graphiti with Ollama clients
|
|
473
547
|
graphiti = Graphiti(
|
|
@@ -477,7 +551,7 @@ graphiti = Graphiti(
|
|
|
477
551
|
llm_client=llm_client,
|
|
478
552
|
embedder=OpenAIEmbedder(
|
|
479
553
|
config=OpenAIEmbedderConfig(
|
|
480
|
-
api_key="
|
|
554
|
+
api_key="ollama", # Placeholder API key
|
|
481
555
|
embedding_model="nomic-embed-text",
|
|
482
556
|
embedding_dim=768,
|
|
483
557
|
base_url="http://localhost:11434/v1",
|
|
@@ -495,7 +569,7 @@ Ensure Ollama is running (`ollama serve`) and that you have pulled the models yo
|
|
|
495
569
|
|
|
496
570
|
- [Guides and API documentation](https://help.getzep.com/graphiti).
|
|
497
571
|
- [Quick Start](https://help.getzep.com/graphiti/graphiti/quick-start)
|
|
498
|
-
- [Building an agent with LangChain's LangGraph and Graphiti](https://help.getzep.com/graphiti/
|
|
572
|
+
- [Building an agent with LangChain's LangGraph and Graphiti](https://help.getzep.com/graphiti/integrations/lang-graph-agent)
|
|
499
573
|
|
|
500
574
|
## Telemetry
|
|
501
575
|
|
|
@@ -510,7 +584,7 @@ When you initialize a Graphiti instance, we collect:
|
|
|
510
584
|
- **Graphiti version**: The version you're using
|
|
511
585
|
- **Configuration choices**:
|
|
512
586
|
- LLM provider type (OpenAI, Azure, Anthropic, etc.)
|
|
513
|
-
- Database backend (Neo4j, FalkorDB)
|
|
587
|
+
- Database backend (Neo4j, FalkorDB, Kuzu, Amazon Neptune Database or Neptune Analytics)
|
|
514
588
|
- Embedder provider (OpenAI, Azure, Voyage, etc.)
|
|
515
589
|
|
|
516
590
|
### What We Don't Collect
|
|
@@ -44,7 +44,7 @@ Use Graphiti to:
|
|
|
44
44
|
<br />
|
|
45
45
|
|
|
46
46
|
<p align="center">
|
|
47
|
-
<img src="images/graphiti-graph-intro.gif" alt="Graphiti temporal walkthrough" width="700px">
|
|
47
|
+
<img src="images/graphiti-graph-intro.gif" alt="Graphiti temporal walkthrough" width="700px">
|
|
48
48
|
</p>
|
|
49
49
|
|
|
50
50
|
<br />
|
|
@@ -80,7 +80,7 @@ Traditional RAG approaches often rely on batch processing and static data summar
|
|
|
80
80
|
- **Scalability:** Efficiently manages large datasets with parallel processing, suitable for enterprise environments.
|
|
81
81
|
|
|
82
82
|
<p align="center">
|
|
83
|
-
<img src="/images/graphiti-intro-slides-stock-2.gif" alt="Graphiti structured + unstructured demo" width="700px">
|
|
83
|
+
<img src="/images/graphiti-intro-slides-stock-2.gif" alt="Graphiti structured + unstructured demo" width="700px">
|
|
84
84
|
</p>
|
|
85
85
|
|
|
86
86
|
## Graphiti vs. GraphRAG
|
|
@@ -105,7 +105,7 @@ Graphiti is specifically designed to address the challenges of dynamic and frequ
|
|
|
105
105
|
Requirements:
|
|
106
106
|
|
|
107
107
|
- Python 3.10 or higher
|
|
108
|
-
- Neo4j 5.26 / FalkorDB 1.1.2 or
|
|
108
|
+
- Neo4j 5.26 / FalkorDB 1.1.2 / Kuzu 0.11.2 / Amazon Neptune Database Cluster or Neptune Analytics Graph + Amazon OpenSearch Serverless collection (serves as the full text search backend)
|
|
109
109
|
- OpenAI API key (Graphiti defaults to OpenAI for LLM inference and embedding)
|
|
110
110
|
|
|
111
111
|
> [!IMPORTANT]
|
|
@@ -148,6 +148,28 @@ pip install graphiti-core[falkordb]
|
|
|
148
148
|
uv add graphiti-core[falkordb]
|
|
149
149
|
```
|
|
150
150
|
|
|
151
|
+
### Installing with Kuzu Support
|
|
152
|
+
|
|
153
|
+
If you plan to use Kuzu as your graph database backend, install with the Kuzu extra:
|
|
154
|
+
|
|
155
|
+
```bash
|
|
156
|
+
pip install graphiti-core[kuzu]
|
|
157
|
+
|
|
158
|
+
# or with uv
|
|
159
|
+
uv add graphiti-core[kuzu]
|
|
160
|
+
```
|
|
161
|
+
|
|
162
|
+
### Installing with Amazon Neptune Support
|
|
163
|
+
|
|
164
|
+
If you plan to use Amazon Neptune as your graph database backend, install with the Amazon Neptune extra:
|
|
165
|
+
|
|
166
|
+
```bash
|
|
167
|
+
pip install graphiti-core[neptune]
|
|
168
|
+
|
|
169
|
+
# or with uv
|
|
170
|
+
uv add graphiti-core[neptune]
|
|
171
|
+
```
|
|
172
|
+
|
|
151
173
|
### You can also install optional LLM providers as extras:
|
|
152
174
|
|
|
153
175
|
```bash
|
|
@@ -165,6 +187,9 @@ pip install graphiti-core[anthropic,groq,google-genai]
|
|
|
165
187
|
|
|
166
188
|
# Install with FalkorDB and LLM providers
|
|
167
189
|
pip install graphiti-core[falkordb,anthropic,google-genai]
|
|
190
|
+
|
|
191
|
+
# Install with Amazon Neptune
|
|
192
|
+
pip install graphiti-core[neptune]
|
|
168
193
|
```
|
|
169
194
|
|
|
170
195
|
## Default to Low Concurrency; LLM Provider 429 Rate Limit Errors
|
|
@@ -184,7 +209,7 @@ If your LLM provider allows higher throughput, you can increase `SEMAPHORE_LIMIT
|
|
|
184
209
|
|
|
185
210
|
For a complete working example, see the [Quickstart Example](./examples/quickstart/README.md) in the examples directory. The quickstart demonstrates:
|
|
186
211
|
|
|
187
|
-
1. Connecting to a Neo4j
|
|
212
|
+
1. Connecting to a Neo4j, Amazon Neptune, FalkorDB, or Kuzu database
|
|
188
213
|
2. Initializing Graphiti indices and constraints
|
|
189
214
|
3. Adding episodes to the graph (both text and structured JSON)
|
|
190
215
|
4. Searching for relationships (edges) using hybrid search
|
|
@@ -267,6 +292,39 @@ driver = FalkorDriver(
|
|
|
267
292
|
graphiti = Graphiti(graph_driver=driver)
|
|
268
293
|
```
|
|
269
294
|
|
|
295
|
+
#### Kuzu
|
|
296
|
+
|
|
297
|
+
```python
|
|
298
|
+
from graphiti_core import Graphiti
|
|
299
|
+
from graphiti_core.driver.kuzu_driver import KuzuDriver
|
|
300
|
+
|
|
301
|
+
# Create a Kuzu driver
|
|
302
|
+
driver = KuzuDriver(db="/tmp/graphiti.kuzu")
|
|
303
|
+
|
|
304
|
+
# Pass the driver to Graphiti
|
|
305
|
+
graphiti = Graphiti(graph_driver=driver)
|
|
306
|
+
```
|
|
307
|
+
|
|
308
|
+
#### Amazon Neptune
|
|
309
|
+
|
|
310
|
+
```python
|
|
311
|
+
from graphiti_core import Graphiti
|
|
312
|
+
from graphiti_core.driver.neptune_driver import NeptuneDriver
|
|
313
|
+
|
|
314
|
+
# Create an Amazon Neptune driver
|
|
315
|
+
driver = NeptuneDriver(
|
|
316
|
+
host=<NEPTUNE ENDPOINT>,
|
|
317
|
+
aoss_host=<Amazon OpenSearch Serverless Host>,
|
|
318
|
+
port=<PORT>,  # Optional, defaults to 8182
|
|
319
|
+
aoss_port=<PORT> # Optional, defaults to 443
|
|
320
|
+
)
|
|
321
|
+
|
|
322
|
+
driver = NeptuneDriver(host=neptune_uri, aoss_host=aoss_host, port=neptune_port)
|
|
323
|
+
|
|
324
|
+
# Pass the driver to Graphiti
|
|
325
|
+
graphiti = Graphiti(graph_driver=driver)
|
|
326
|
+
```
|
|
327
|
+
|
|
270
328
|
|
|
271
329
|
### Performance Configuration
|
|
272
330
|
|
|
@@ -279,6 +337,13 @@ as such this feature is off by default.
|
|
|
279
337
|
|
|
280
338
|
Graphiti supports Azure OpenAI for both LLM inference and embeddings. Azure deployments often require different endpoints for LLM and embedding services, and separate deployments for default and small models.
|
|
281
339
|
|
|
340
|
+
> [!IMPORTANT]
|
|
341
|
+
> **Azure OpenAI v1 API Opt-in Required for Structured Outputs**
|
|
342
|
+
>
|
|
343
|
+
> Graphiti uses structured outputs via the `client.beta.chat.completions.parse()` method, which requires Azure OpenAI deployments to opt into the v1 API. Without this opt-in, you'll encounter 404 Resource not found errors during episode ingestion.
|
|
344
|
+
>
|
|
345
|
+
> To enable v1 API support in your Azure OpenAI deployment, follow Microsoft's guide: [Azure OpenAI API version lifecycle](https://learn.microsoft.com/en-us/azure/ai-foundry/openai/api-version-lifecycle?tabs=key#api-evolution).
|
|
346
|
+
|
|
282
347
|
```python
|
|
283
348
|
from openai import AsyncAzureOpenAI
|
|
284
349
|
from graphiti_core import Graphiti
|
|
@@ -397,25 +462,27 @@ The Gemini reranker uses the `gemini-2.5-flash-lite-preview-06-17` model by defa
|
|
|
397
462
|
Graphiti supports Ollama for running local LLMs and embedding models via Ollama's OpenAI-compatible API. This is ideal for privacy-focused applications or when you want to avoid API costs.
|
|
398
463
|
|
|
399
464
|
Install the models:
|
|
465
|
+
```bash
|
|
400
466
|
ollama pull deepseek-r1:7b # LLM
|
|
401
467
|
ollama pull nomic-embed-text # embeddings
|
|
468
|
+
```
|
|
402
469
|
|
|
403
470
|
```python
|
|
404
471
|
from graphiti_core import Graphiti
|
|
405
472
|
from graphiti_core.llm_client.config import LLMConfig
|
|
406
|
-
from graphiti_core.llm_client.
|
|
473
|
+
from graphiti_core.llm_client.openai_generic_client import OpenAIGenericClient
|
|
407
474
|
from graphiti_core.embedder.openai import OpenAIEmbedder, OpenAIEmbedderConfig
|
|
408
475
|
from graphiti_core.cross_encoder.openai_reranker_client import OpenAIRerankerClient
|
|
409
476
|
|
|
410
477
|
# Configure Ollama LLM client
|
|
411
478
|
llm_config = LLMConfig(
|
|
412
|
-
api_key="
|
|
479
|
+
api_key="ollama", # Ollama doesn't require a real API key, but some placeholder is needed
|
|
413
480
|
model="deepseek-r1:7b",
|
|
414
481
|
small_model="deepseek-r1:7b",
|
|
415
|
-
base_url="http://localhost:11434/v1",
|
|
482
|
+
base_url="http://localhost:11434/v1", # Ollama's OpenAI-compatible endpoint
|
|
416
483
|
)
|
|
417
484
|
|
|
418
|
-
llm_client =
|
|
485
|
+
llm_client = OpenAIGenericClient(config=llm_config)
|
|
419
486
|
|
|
420
487
|
# Initialize Graphiti with Ollama clients
|
|
421
488
|
graphiti = Graphiti(
|
|
@@ -425,7 +492,7 @@ graphiti = Graphiti(
|
|
|
425
492
|
llm_client=llm_client,
|
|
426
493
|
embedder=OpenAIEmbedder(
|
|
427
494
|
config=OpenAIEmbedderConfig(
|
|
428
|
-
api_key="
|
|
495
|
+
api_key="ollama", # Placeholder API key
|
|
429
496
|
embedding_model="nomic-embed-text",
|
|
430
497
|
embedding_dim=768,
|
|
431
498
|
base_url="http://localhost:11434/v1",
|
|
@@ -443,7 +510,7 @@ Ensure Ollama is running (`ollama serve`) and that you have pulled the models yo
|
|
|
443
510
|
|
|
444
511
|
- [Guides and API documentation](https://help.getzep.com/graphiti).
|
|
445
512
|
- [Quick Start](https://help.getzep.com/graphiti/graphiti/quick-start)
|
|
446
|
-
- [Building an agent with LangChain's LangGraph and Graphiti](https://help.getzep.com/graphiti/
|
|
513
|
+
- [Building an agent with LangChain's LangGraph and Graphiti](https://help.getzep.com/graphiti/integrations/lang-graph-agent)
|
|
447
514
|
|
|
448
515
|
## Telemetry
|
|
449
516
|
|
|
@@ -458,7 +525,7 @@ When you initialize a Graphiti instance, we collect:
|
|
|
458
525
|
- **Graphiti version**: The version you're using
|
|
459
526
|
- **Configuration choices**:
|
|
460
527
|
- LLM provider type (OpenAI, Azure, Anthropic, etc.)
|
|
461
|
-
- Database backend (Neo4j, FalkorDB)
|
|
528
|
+
- Database backend (Neo4j, FalkorDB, Kuzu, Amazon Neptune Database or Neptune Analytics)
|
|
462
529
|
- Embedder provider (OpenAI, Azure, Voyage, etc.)
|
|
463
530
|
|
|
464
531
|
### What We Don't Collect
|
|
@@ -4,3 +4,7 @@ import sys
|
|
|
4
4
|
# This code adds the project root directory to the Python path, allowing imports to work correctly when running tests.
|
|
5
5
|
# Without this file, you might encounter ModuleNotFoundError when trying to import modules from your project, especially when running tests.
|
|
6
6
|
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__))))
|
|
7
|
+
|
|
8
|
+
from tests.helpers_test import graph_driver, mock_embedder
|
|
9
|
+
|
|
10
|
+
__all__ = ['graph_driver', 'mock_embedder']
|
|
@@ -18,6 +18,8 @@ This example demonstrates the basic functionality of Graphiti, including:
|
|
|
18
18
|
- A local DBMS created and started in Neo4j Desktop
|
|
19
19
|
- **For FalkorDB**:
|
|
20
20
|
- FalkorDB server running (see [FalkorDB documentation](https://falkordb.com/docs/) for setup)
|
|
21
|
+
- **For Amazon Neptune**:
|
|
22
|
+
- Amazon Neptune cluster running (see [Amazon Neptune documentation](https://aws.amazon.com/neptune/developer-resources/) for setup)
|
|
21
23
|
|
|
22
24
|
|
|
23
25
|
## Setup Instructions
|
|
@@ -42,9 +44,19 @@ export NEO4J_PASSWORD=password
|
|
|
42
44
|
# Optional FalkorDB connection parameters (defaults shown)
|
|
43
45
|
export FALKORDB_URI=falkor://localhost:6379
|
|
44
46
|
|
|
47
|
+
# Optional Amazon Neptune connection parameters
|
|
48
|
+
export NEPTUNE_HOST=your_neptune_host
|
|
49
|
+
export NEPTUNE_PORT=your_port_or_8182
|
|
50
|
+
export AOSS_HOST=your_aoss_host
|
|
51
|
+
export AOSS_PORT=your_port_or_443
|
|
52
|
+
|
|
45
53
|
# To use a different database, modify the driver constructor in the script
|
|
46
54
|
```
|
|
47
55
|
|
|
56
|
+
TIP: For Amazon Neptune host string please use the following formats
|
|
57
|
+
* For Neptune Database: `neptune-db://<cluster endpoint>`
|
|
58
|
+
* For Neptune Analytics: `neptune-graph://<graph identifier>`
|
|
59
|
+
|
|
48
60
|
3. Run the example:
|
|
49
61
|
|
|
50
62
|
```bash
|
|
@@ -52,11 +64,14 @@ python quickstart_neo4j.py
|
|
|
52
64
|
|
|
53
65
|
# For FalkorDB
|
|
54
66
|
python quickstart_falkordb.py
|
|
67
|
+
|
|
68
|
+
# For Amazon Neptune
|
|
69
|
+
python quickstart_neptune.py
|
|
55
70
|
```
|
|
56
71
|
|
|
57
72
|
## What This Example Demonstrates
|
|
58
73
|
|
|
59
|
-
- **Graph Initialization**: Setting up the Graphiti indices and constraints in Neo4j or FalkorDB
|
|
74
|
+
- **Graph Initialization**: Setting up the Graphiti indices and constraints in Neo4j, Amazon Neptune, or FalkorDB
|
|
60
75
|
- **Adding Episodes**: Adding text content that will be analyzed and converted into knowledge graph nodes and edges
|
|
61
76
|
- **Edge Search Functionality**: Performing hybrid searches that combine semantic similarity and BM25 retrieval to find relationships (edges)
|
|
62
77
|
- **Graph-Aware Search**: Using the source node UUID from the top search result to rerank additional search results based on graph distance
|