graphiti-core 0.15.0__tar.gz → 0.15.1__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- graphiti_core-0.15.1/.github/workflows/mcp-server-docker.yml +73 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/CONTRIBUTING.md +62 -1
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/PKG-INFO +23 -4
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/README.md +15 -1
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/examples/quickstart/quickstart_falkordb.py +5 -2
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/graphiti_core/cross_encoder/__init__.py +1 -2
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/graphiti_core/cross_encoder/bge_reranker_client.py +12 -2
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/graphiti_core/cross_encoder/gemini_reranker_client.py +16 -4
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/graphiti_core/cross_encoder/openai_reranker_client.py +5 -3
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/graphiti_core/driver/__init__.py +1 -2
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/graphiti_core/driver/falkordb_driver.py +15 -4
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/graphiti_core/embedder/gemini.py +14 -2
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/graphiti_core/embedder/voyage.py +12 -1
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/graphiti_core/graphiti.py +1 -1
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/graphiti_core/llm_client/anthropic_client.py +17 -4
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/graphiti_core/llm_client/gemini_client.py +16 -3
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/graphiti_core/llm_client/groq_client.py +14 -3
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/graphiti_core/nodes.py +4 -4
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/graphiti_core/search/search_utils.py +4 -2
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/graphiti_core/utils/maintenance/graph_data_operations.py +2 -1
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/mcp_server/README.md +19 -3
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/mcp_server/docker-compose.yml +4 -3
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/mcp_server/graphiti_mcp_server.py +41 -32
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/mcp_server/pyproject.toml +1 -1
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/pyproject.toml +5 -2
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/signatures/version1/cla.json +32 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/tests/driver/test_falkordb_driver.py +41 -65
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/tests/test_graphiti_falkordb_int.py +4 -10
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/tests/test_graphiti_int.py +6 -1
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/tests/test_node_falkordb_int.py +6 -15
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/uv.lock +14 -4
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/.env.example +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/.github/dependabot.yml +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/.github/secret_scanning.yml +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/.github/workflows/cla.yml +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/.github/workflows/codeql.yml +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/.github/workflows/lint.yml +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/.github/workflows/release-graphiti-core.yml +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/.github/workflows/typecheck.yml +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/.github/workflows/unit_tests.yml +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/.gitignore +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/CLAUDE.md +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/CODE_OF_CONDUCT.md +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/Dockerfile +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/LICENSE +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/Makefile +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/SECURITY.md +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/Zep-CLA.md +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/conftest.py +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/depot.json +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/docker-compose.test.yml +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/docker-compose.yml +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/ellipsis.yaml +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/examples/data/manybirds_products.json +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/examples/ecommerce/runner.ipynb +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/examples/ecommerce/runner.py +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/examples/langgraph-agent/agent.ipynb +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/examples/langgraph-agent/tinybirds-jess.png +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/examples/podcast/podcast_runner.py +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/examples/podcast/podcast_transcript.txt +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/examples/podcast/transcript_parser.py +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/examples/quickstart/README.md +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/examples/quickstart/quickstart_neo4j.py +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/examples/quickstart/requirements.txt +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/examples/wizard_of_oz/parser.py +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/examples/wizard_of_oz/runner.py +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/examples/wizard_of_oz/woo.txt +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/graphiti_core/__init__.py +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/graphiti_core/cross_encoder/client.py +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/graphiti_core/driver/driver.py +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/graphiti_core/driver/neo4j_driver.py +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/graphiti_core/edges.py +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/graphiti_core/embedder/__init__.py +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/graphiti_core/embedder/azure_openai.py +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/graphiti_core/embedder/client.py +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/graphiti_core/embedder/openai.py +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/graphiti_core/errors.py +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/graphiti_core/graph_queries.py +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/graphiti_core/graphiti_types.py +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/graphiti_core/helpers.py +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/graphiti_core/llm_client/__init__.py +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/graphiti_core/llm_client/azure_openai_client.py +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/graphiti_core/llm_client/client.py +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/graphiti_core/llm_client/config.py +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/graphiti_core/llm_client/errors.py +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/graphiti_core/llm_client/openai_base_client.py +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/graphiti_core/llm_client/openai_client.py +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/graphiti_core/llm_client/openai_generic_client.py +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/graphiti_core/llm_client/utils.py +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/graphiti_core/models/__init__.py +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/graphiti_core/models/edges/__init__.py +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/graphiti_core/models/edges/edge_db_queries.py +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/graphiti_core/models/nodes/__init__.py +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/graphiti_core/models/nodes/node_db_queries.py +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/graphiti_core/prompts/__init__.py +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/graphiti_core/prompts/dedupe_edges.py +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/graphiti_core/prompts/dedupe_nodes.py +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/graphiti_core/prompts/eval.py +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/graphiti_core/prompts/extract_edge_dates.py +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/graphiti_core/prompts/extract_edges.py +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/graphiti_core/prompts/extract_nodes.py +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/graphiti_core/prompts/invalidate_edges.py +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/graphiti_core/prompts/lib.py +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/graphiti_core/prompts/models.py +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/graphiti_core/prompts/prompt_helpers.py +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/graphiti_core/prompts/summarize_nodes.py +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/graphiti_core/py.typed +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/graphiti_core/search/__init__.py +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/graphiti_core/search/search.py +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/graphiti_core/search/search_config.py +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/graphiti_core/search/search_config_recipes.py +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/graphiti_core/search/search_filters.py +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/graphiti_core/search/search_helpers.py +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/graphiti_core/telemetry/__init__.py +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/graphiti_core/telemetry/telemetry.py +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/graphiti_core/utils/__init__.py +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/graphiti_core/utils/bulk_utils.py +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/graphiti_core/utils/datetime_utils.py +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/graphiti_core/utils/maintenance/__init__.py +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/graphiti_core/utils/maintenance/community_operations.py +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/graphiti_core/utils/maintenance/edge_operations.py +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/graphiti_core/utils/maintenance/node_operations.py +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/graphiti_core/utils/maintenance/temporal_operations.py +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/graphiti_core/utils/maintenance/utils.py +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/graphiti_core/utils/ontology_utils/entity_types_utils.py +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/images/arxiv-screenshot.png +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/images/graphiti-graph-intro.gif +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/images/graphiti-intro-slides-stock-2.gif +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/images/simple_graph.svg +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/mcp_server/.env.example +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/mcp_server/.python-version +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/mcp_server/Dockerfile +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/mcp_server/cursor_rules.md +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/mcp_server/mcp_config_sse_example.json +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/mcp_server/mcp_config_stdio_example.json +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/mcp_server/uv.lock +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/poetry.lock +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/py.typed +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/pytest.ini +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/server/.env.example +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/server/Makefile +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/server/README.md +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/server/graph_service/__init__.py +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/server/graph_service/config.py +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/server/graph_service/dto/__init__.py +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/server/graph_service/dto/common.py +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/server/graph_service/dto/ingest.py +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/server/graph_service/dto/retrieve.py +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/server/graph_service/main.py +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/server/graph_service/routers/__init__.py +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/server/graph_service/routers/ingest.py +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/server/graph_service/routers/retrieve.py +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/server/graph_service/zep_graphiti.py +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/server/pyproject.toml +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/server/uv.lock +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/tests/cross_encoder/test_bge_reranker_client.py +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/tests/cross_encoder/test_gemini_reranker_client.py +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/tests/driver/__init__.py +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/tests/embedder/embedder_fixtures.py +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/tests/embedder/test_gemini.py +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/tests/embedder/test_openai.py +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/tests/embedder/test_voyage.py +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/tests/evals/data/longmemeval_data/README.md +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/tests/evals/data/longmemeval_data/longmemeval_oracle.json +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/tests/evals/eval_cli.py +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/tests/evals/eval_e2e_graph_building.py +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/tests/evals/pytest.ini +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/tests/evals/utils.py +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/tests/helpers_test.py +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/tests/llm_client/test_anthropic_client.py +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/tests/llm_client/test_anthropic_client_int.py +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/tests/llm_client/test_client.py +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/tests/llm_client/test_errors.py +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/tests/llm_client/test_gemini_client.py +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/tests/test_entity_exclusion_int.py +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/tests/test_node_int.py +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/tests/utils/maintenance/test_edge_operations.py +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/tests/utils/maintenance/test_temporal_operations_int.py +0 -0
- {graphiti_core-0.15.0 → graphiti_core-0.15.1}/tests/utils/search/search_utils_test.py +0 -0
graphiti_core-0.15.1/.github/workflows/mcp-server-docker.yml
@@ -0,0 +1,73 @@
+name: Build and Push MCP Server Docker Image
+
+on:
+  push:
+    paths:
+      - "mcp_server/pyproject.toml"
+    branches:
+      - main
+  pull_request:
+    paths:
+      - "mcp_server/pyproject.toml"
+    branches:
+      - main
+  workflow_dispatch:
+    inputs:
+      push_image:
+        description: "Push image to registry (unchecked for testing)"
+        required: false
+        default: false
+        type: boolean
+
+env:
+  REGISTRY: docker.io
+  IMAGE_NAME: zepai/knowledge-graph-mcp
+
+jobs:
+  build-and-push:
+    runs-on: depot-ubuntu-24.04-small
+    environment: development
+    permissions:
+      contents: read
+      id-token: write
+    steps:
+      - name: Checkout repository
+        uses: actions/checkout@v4
+
+      - name: Extract version from pyproject.toml
+        id: version
+        run: |
+          VERSION=$(python -c "import tomllib; print(tomllib.load(open('mcp_server/pyproject.toml', 'rb'))['project']['version'])")
+          echo "version=$VERSION" >> $GITHUB_OUTPUT
+          echo "tag=v$VERSION" >> $GITHUB_OUTPUT
+      - name: Log in to Docker Hub
+        if: github.event_name != 'pull_request' && (github.event_name != 'workflow_dispatch' || inputs.push_image)
+        uses: docker/login-action@v3
+        with:
+          registry: ${{ env.REGISTRY }}
+          username: ${{ secrets.DOCKERHUB_USERNAME }}
+          password: ${{ secrets.DOCKERHUB_TOKEN }}
+
+      - name: Set up Depot CLI
+        uses: depot/setup-action@v1
+
+      - name: Extract metadata
+        id: meta
+        uses: docker/metadata-action@v5
+        with:
+          images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
+          tags: |
+            type=ref,event=branch
+            type=ref,event=pr
+            type=raw,value=${{ steps.version.outputs.tag }}
+            type=raw,value=latest,enable={{is_default_branch}}
+
+      - name: Depot build and push image
+        uses: depot/build-push-action@v1
+        with:
+          project: v9jv1mlpwc
+          context: ./mcp_server
+          platforms: linux/amd64,linux/arm64
+          push: ${{ github.event_name != 'pull_request' && (github.event_name != 'workflow_dispatch' || inputs.push_image) }}
+          tags: ${{ steps.meta.outputs.tags }}
+          labels: ${{ steps.meta.outputs.labels }}
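
The version-extraction step above boils down to reading `project.version` from `mcp_server/pyproject.toml` with the standard-library `tomllib` parser. A rough local equivalent of that one-liner (assuming Python 3.11+, where `tomllib` is available, and running from the repository root) is sketched below:

```python
# Rough local equivalent of the workflow's "Extract version" step (assumes Python 3.11+
# for the stdlib tomllib module and that the script runs from the repository root).
import tomllib

with open('mcp_server/pyproject.toml', 'rb') as f:
    version = tomllib.load(f)['project']['version']

# The workflow writes these two values to $GITHUB_OUTPUT for later steps to consume.
print(f'version={version}')
print(f'tag=v{version}')
```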
{graphiti_core-0.15.0 → graphiti_core-0.15.1}/CONTRIBUTING.md
RENAMED
@@ -116,7 +116,7 @@ Once you've found an issue tagged with "good first issue" or "help wanted," or p
 We use several tools to maintain code quality:
 
 - Ruff for linting and formatting
--
+- Pyright for static type checking
 - Pytest for testing
 
 Before submitting a pull request, please run:
@@ -127,6 +127,67 @@ make check
 
 This command will format your code, run linting checks, and execute tests.
 
+## Third-Party Integrations
+
+When contributing integrations for third-party services (LLM providers, embedding services, databases, etc.), please follow these patterns:
+
+### Optional Dependencies
+
+All third-party integrations must be optional dependencies to keep the core library lightweight. Follow this pattern:
+
+1. **Add to `pyproject.toml`**: Define your dependency as an optional extra AND include it in the dev extra:
+```toml
+[project.optional-dependencies]
+your-service = ["your-package>=1.0.0"]
+dev = [
+    # ... existing dev dependencies
+    "your-package>=1.0.0",  # Include all optional extras here
+    # ... other dependencies
+]
+```
+
+2. **Use TYPE_CHECKING pattern**: In your integration module, import dependencies conditionally:
+```python
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    import your_package
+    from your_package import SomeType
+else:
+    try:
+        import your_package
+        from your_package import SomeType
+    except ImportError:
+        raise ImportError(
+            'your-package is required for YourServiceClient. '
+            'Install it with: pip install graphiti-core[your-service]'
+        ) from None
+```
+
+3. **Benefits of this pattern**:
+   - Fast startup times (no import overhead during type checking)
+   - Clear error messages with installation instructions
+   - Proper type hints for development
+   - Consistent user experience
+
+4. **Do NOT**:
+   - Add optional imports to `__init__.py` files
+   - Use direct imports without error handling
+   - Include optional dependencies in the main `dependencies` list
+
+### Integration Structure
+
+- Place LLM clients in `graphiti_core/llm_client/`
+- Place embedding clients in `graphiti_core/embedder/`
+- Place database drivers in `graphiti_core/driver/`
+- Follow existing naming conventions (e.g., `your_service_client.py`)
+
+### Testing
+
+- Add comprehensive tests in the appropriate `tests/` subdirectory
+- Mark integration tests with `_int` suffix if they require external services
+- Include both unit tests and integration tests where applicable
+
 # Questions?
 
 Stuck on a contribution or have a half-formed idea? Come say hello in our [Discord server](https://discord.com/invite/W8Kw6bsgXQ). Whether you're ready to contribute or just want to learn more, we're happy to have you! It's faster than GitHub issues and you'll find both maintainers and fellow contributors ready to help.
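
For contributors, a quick way to sanity-check the guarded-import pattern from a user's perspective is to import the module in an environment that lacks the extra. A minimal sketch, reusing the hypothetical `your_service_client` / `YourServiceClient` names from the guidelines above (not a real graphiti-core module):

```python
# Hypothetical check of the guarded-import behaviour described in the contributing guide.
# The module and class names below are the guide's placeholders, not real graphiti modules.
try:
    from graphiti_core.llm_client.your_service_client import YourServiceClient  # noqa: F401
except ImportError as exc:
    # Expected when the optional package is missing:
    # "your-package is required for YourServiceClient.
    #  Install it with: pip install graphiti-core[your-service]"
    print(f'optional dependency missing: {exc}')
```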
{graphiti_core-0.15.0 → graphiti_core-0.15.1}/PKG-INFO
RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: graphiti-core
-Version: 0.15.0
+Version: 0.15.1
 Summary: A temporal graph building library
 Project-URL: Homepage, https://help.getzep.com/graphiti/graphiti/overview
 Project-URL: Repository, https://github.com/getzep/graphiti
@@ -21,6 +21,7 @@ Requires-Dist: anthropic>=0.49.0; extra == 'anthropic'
 Provides-Extra: dev
 Requires-Dist: anthropic>=0.49.0; extra == 'dev'
 Requires-Dist: diskcache-stubs>=5.6.3.6.20240818; extra == 'dev'
+Requires-Dist: falkordb<2.0.0,>=1.1.2; extra == 'dev'
 Requires-Dist: google-genai>=1.8.0; extra == 'dev'
 Requires-Dist: groq>=0.2.0; extra == 'dev'
 Requires-Dist: ipykernel>=6.29.5; extra == 'dev'
@@ -37,12 +38,16 @@ Requires-Dist: ruff>=0.7.1; extra == 'dev'
 Requires-Dist: sentence-transformers>=3.2.1; extra == 'dev'
 Requires-Dist: transformers>=4.45.2; extra == 'dev'
 Requires-Dist: voyageai>=0.2.3; extra == 'dev'
-Provides-Extra:
-Requires-Dist: falkordb<2.0.0,>=1.1.2; extra == '
+Provides-Extra: falkordb
+Requires-Dist: falkordb<2.0.0,>=1.1.2; extra == 'falkordb'
 Provides-Extra: google-genai
 Requires-Dist: google-genai>=1.8.0; extra == 'google-genai'
 Provides-Extra: groq
 Requires-Dist: groq>=0.2.0; extra == 'groq'
+Provides-Extra: sentence-transformers
+Requires-Dist: sentence-transformers>=3.2.1; extra == 'sentence-transformers'
+Provides-Extra: voyageai
+Requires-Dist: voyageai>=0.2.3; extra == 'voyageai'
 Description-Content-Type: text/markdown
 
 <p align="center">
@@ -184,7 +189,18 @@ or
 uv add graphiti-core
 ```
 
-
+### Installing with FalkorDB Support
+
+If you plan to use FalkorDB as your graph database backend, install with the FalkorDB extra:
+
+```bash
+pip install graphiti-core[falkordb]
+
+# or with uv
+uv add graphiti-core[falkordb]
+```
+
+### You can also install optional LLM providers as extras:
 
 ```bash
 # Install with Anthropic support
@@ -198,6 +214,9 @@ pip install graphiti-core[google-genai]
 
 # Install with multiple providers
 pip install graphiti-core[anthropic,groq,google-genai]
+
+# Install with FalkorDB and LLM providers
+pip install graphiti-core[falkordb,anthropic,google-genai]
 ```
 
 ## Quick Start
{graphiti_core-0.15.0 → graphiti_core-0.15.1}/README.md
RENAMED
@@ -137,7 +137,18 @@ or
 uv add graphiti-core
 ```
 
-
+### Installing with FalkorDB Support
+
+If you plan to use FalkorDB as your graph database backend, install with the FalkorDB extra:
+
+```bash
+pip install graphiti-core[falkordb]
+
+# or with uv
+uv add graphiti-core[falkordb]
+```
+
+### You can also install optional LLM providers as extras:
 
 ```bash
 # Install with Anthropic support
@@ -151,6 +162,9 @@ pip install graphiti-core[google-genai]
 
 # Install with multiple providers
 pip install graphiti-core[anthropic,groq,google-genai]
+
+# Install with FalkorDB and LLM providers
+pip install graphiti-core[falkordb,anthropic,google-genai]
 ```
 
 ## Quick Start
{graphiti_core-0.15.0 → graphiti_core-0.15.1}/examples/quickstart/quickstart_falkordb.py
RENAMED
@@ -49,7 +49,7 @@ load_dotenv()
 # Make sure FalkorDB (on-premises) is running — see https://docs.falkordb.com/
 # By default, FalkorDB does not require a username or password,
 # but you can set them via environment variables for added security.
-#
+#
 # If you're using FalkorDB Cloud, set the environment variables accordingly.
 # For on-premises use, you can leave them as None or set them to your preferred values.
 #
@@ -61,6 +61,7 @@ falkor_password = os.environ.get('FALKORDB_PASSWORD', None)
 falkor_host = os.environ.get('FALKORDB_HOST', 'localhost')
 falkor_port = os.environ.get('FALKORDB_PORT', '6379')
 
+
 async def main():
     #################################################
     # INITIALIZATION
@@ -71,7 +72,9 @@ async def main():
     #################################################
 
     # Initialize Graphiti with FalkorDB connection
-    falkor_driver = FalkorDriver(
+    falkor_driver = FalkorDriver(
+        host=falkor_host, port=falkor_port, username=falkor_username, password=falkor_password
+    )
     graphiti = Graphiti(graph_driver=falkor_driver)
 
     try:
{graphiti_core-0.15.0 → graphiti_core-0.15.1}/graphiti_core/cross_encoder/__init__.py
RENAMED
@@ -15,7 +15,6 @@ limitations under the License.
 """
 
 from .client import CrossEncoderClient
-from .gemini_reranker_client import GeminiRerankerClient
 from .openai_reranker_client import OpenAIRerankerClient
 
-__all__ = ['CrossEncoderClient', '
+__all__ = ['CrossEncoderClient', 'OpenAIRerankerClient']
{graphiti_core-0.15.0 → graphiti_core-0.15.1}/graphiti_core/cross_encoder/bge_reranker_client.py
RENAMED
@@ -15,8 +15,18 @@ limitations under the License.
 """
 
 import asyncio
-
-
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from sentence_transformers import CrossEncoder
+else:
+    try:
+        from sentence_transformers import CrossEncoder
+    except ImportError:
+        raise ImportError(
+            'sentence-transformers is required for BGERerankerClient. '
+            'Install it with: pip install graphiti-core[sentence-transformers]'
+        ) from None
 
 from graphiti_core.cross_encoder.client import CrossEncoderClient
 
{graphiti_core-0.15.0 → graphiti_core-0.15.1}/graphiti_core/cross_encoder/gemini_reranker_client.py
RENAMED
@@ -16,14 +16,25 @@ limitations under the License.
 
 import logging
 import re
-
-from google import genai  # type: ignore
-from google.genai import types  # type: ignore
+from typing import TYPE_CHECKING
 
 from ..helpers import semaphore_gather
 from ..llm_client import LLMConfig, RateLimitError
 from .client import CrossEncoderClient
 
+if TYPE_CHECKING:
+    from google import genai
+    from google.genai import types
+else:
+    try:
+        from google import genai
+        from google.genai import types
+    except ImportError:
+        raise ImportError(
+            'google-genai is required for GeminiRerankerClient. '
+            'Install it with: pip install graphiti-core[google-genai]'
+        ) from None
+
 logger = logging.getLogger(__name__)
 
 DEFAULT_MODEL = 'gemini-2.5-flash-lite-preview-06-17'
@@ -33,7 +44,7 @@ class GeminiRerankerClient(CrossEncoderClient):
     def __init__(
         self,
         config: LLMConfig | None = None,
-        client: genai.Client | None = None,
+        client: 'genai.Client | None' = None,
     ):
         """
         Initialize the GeminiRerankerClient with the provided configuration and client.
@@ -46,6 +57,7 @@ class GeminiRerankerClient(CrossEncoderClient):
             config (LLMConfig | None): The configuration for the LLM client, including API key, model, base URL, temperature, and max tokens.
             client (genai.Client | None): An optional async client instance to use. If not provided, a new genai.Client is created.
         """
+
         if config is None:
             config = LLMConfig()
 
{graphiti_core-0.15.0 → graphiti_core-0.15.1}/graphiti_core/cross_encoder/openai_reranker_client.py
RENAMED
@@ -22,7 +22,7 @@ import openai
 from openai import AsyncAzureOpenAI, AsyncOpenAI
 
 from ..helpers import semaphore_gather
-from ..llm_client import LLMConfig, RateLimitError
+from ..llm_client import LLMConfig, OpenAIClient, RateLimitError
 from ..prompts import Message
 from .client import CrossEncoderClient
 
@@ -35,7 +35,7 @@ class OpenAIRerankerClient(CrossEncoderClient):
     def __init__(
         self,
         config: LLMConfig | None = None,
-        client: AsyncOpenAI | AsyncAzureOpenAI | None = None,
+        client: AsyncOpenAI | AsyncAzureOpenAI | OpenAIClient | None = None,
     ):
         """
         Initialize the OpenAIRerankerClient with the provided configuration and client.
@@ -45,7 +45,7 @@ class OpenAIRerankerClient(CrossEncoderClient):
 
         Args:
             config (LLMConfig | None): The configuration for the LLM client, including API key, model, base URL, temperature, and max tokens.
-            client (AsyncOpenAI | AsyncAzureOpenAI | None): An optional async client instance to use. If not provided, a new AsyncOpenAI client is created.
+            client (AsyncOpenAI | AsyncAzureOpenAI | OpenAIClient | None): An optional async client instance to use. If not provided, a new AsyncOpenAI client is created.
         """
         if config is None:
             config = LLMConfig()
@@ -53,6 +53,8 @@ class OpenAIRerankerClient(CrossEncoderClient):
         self.config = config
         if client is None:
             self.client = AsyncOpenAI(api_key=config.api_key, base_url=config.base_url)
+        elif isinstance(client, OpenAIClient):
+            self.client = client.client
 else:
             self.client = client
 
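
The net effect of this change: `OpenAIRerankerClient` can now accept an `OpenAIClient` wrapper and reuse its underlying `AsyncOpenAI` client instead of constructing a new one. A minimal sketch of the new behaviour (the `LLMConfig`/`OpenAIClient` constructor arguments are assumptions based on the surrounding code, not taken from the diff):

```python
# Hedged sketch of the new OpenAIClient handling in OpenAIRerankerClient.
from graphiti_core.cross_encoder.openai_reranker_client import OpenAIRerankerClient
from graphiti_core.llm_client import LLMConfig, OpenAIClient

llm = OpenAIClient(config=LLMConfig(api_key='sk-...'))  # constructor args are an assumption
reranker = OpenAIRerankerClient(config=LLMConfig(api_key='sk-...'), client=llm)

# Per the elif branch above, the reranker unwraps the wrapper and shares its AsyncOpenAI client.
assert reranker.client is llm.client
```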
{graphiti_core-0.15.0 → graphiti_core-0.15.1}/graphiti_core/driver/falkordb_driver.py
RENAMED
@@ -16,10 +16,21 @@ limitations under the License.
 
 import logging
 from datetime import datetime
-from typing import Any
-
-
-from falkordb
+from typing import TYPE_CHECKING, Any
+
+if TYPE_CHECKING:
+    from falkordb import Graph as FalkorGraph
+    from falkordb.asyncio import FalkorDB
+else:
+    try:
+        from falkordb import Graph as FalkorGraph
+        from falkordb.asyncio import FalkorDB
+    except ImportError:
+        # If falkordb is not installed, raise an ImportError
+        raise ImportError(
+            'falkordb is required for FalkorDriver. '
+            'Install it with: pip install graphiti-core[falkordb]'
+        ) from None
 
 from graphiti_core.driver.driver import GraphDriver, GraphDriverSession
 from graphiti_core.helpers import DEFAULT_DATABASE
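
With `falkordb` now behind an extra, this driver import either succeeds or fails fast with the install hint from the guard above. A minimal sketch of how the quickstart shown earlier exercises it, assuming `pip install graphiti-core[falkordb]` and a local FalkorDB instance:

```python
# Minimal sketch assuming the falkordb extra is installed and FalkorDB is listening on
# localhost:6379 (the defaults used by the quickstart example earlier in this diff).
from graphiti_core import Graphiti
from graphiti_core.driver.falkordb_driver import FalkorDriver

falkor_driver = FalkorDriver(host='localhost', port='6379', username=None, password=None)
graphiti = Graphiti(graph_driver=falkor_driver)
```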
{graphiti_core-0.15.0 → graphiti_core-0.15.1}/graphiti_core/embedder/gemini.py
RENAMED
@@ -15,9 +15,21 @@ limitations under the License.
 """
 
 from collections.abc import Iterable
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from google import genai
+    from google.genai import types
+else:
+    try:
+        from google import genai
+        from google.genai import types
+    except ImportError:
+        raise ImportError(
+            'google-genai is required for GeminiEmbedder. '
+            'Install it with: pip install graphiti-core[google-genai]'
+        ) from None
 
-from google import genai  # type: ignore
-from google.genai import types  # type: ignore
 from pydantic import Field
 
 from .client import EmbedderClient, EmbedderConfig
{graphiti_core-0.15.0 → graphiti_core-0.15.1}/graphiti_core/embedder/voyage.py
RENAMED
@@ -15,8 +15,19 @@ limitations under the License.
 """
 
 from collections.abc import Iterable
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    import voyageai
+else:
+    try:
+        import voyageai
+    except ImportError:
+        raise ImportError(
+            'voyageai is required for VoyageAIEmbedderClient. '
+            'Install it with: pip install graphiti-core[voyageai]'
+        ) from None
 
-import voyageai  # type: ignore
 from pydantic import Field
 
 from .client import EmbedderClient, EmbedderConfig
{graphiti_core-0.15.0 → graphiti_core-0.15.1}/graphiti_core/graphiti.py
RENAMED
@@ -166,7 +166,7 @@ class Graphiti:
             self.driver = graph_driver
         else:
             if uri is None:
-                raise ValueError(
+                raise ValueError('uri must be provided when graph_driver is None')
             self.driver = Neo4jDriver(uri, user, password)
 
         self.database = DEFAULT_DATABASE
{graphiti_core-0.15.0 → graphiti_core-0.15.1}/graphiti_core/llm_client/anthropic_client.py
RENAMED
@@ -19,11 +19,8 @@ import logging
 import os
 import typing
 from json import JSONDecodeError
-from typing import Literal
+from typing import TYPE_CHECKING, Literal
 
-import anthropic
-from anthropic import AsyncAnthropic
-from anthropic.types import MessageParam, ToolChoiceParam, ToolUnionParam
 from pydantic import BaseModel, ValidationError
 
 from ..prompts.models import Message
@@ -31,6 +28,22 @@ from .client import LLMClient
 from .config import DEFAULT_MAX_TOKENS, LLMConfig, ModelSize
 from .errors import RateLimitError, RefusalError
 
+if TYPE_CHECKING:
+    import anthropic
+    from anthropic import AsyncAnthropic
+    from anthropic.types import MessageParam, ToolChoiceParam, ToolUnionParam
+else:
+    try:
+        import anthropic
+        from anthropic import AsyncAnthropic
+        from anthropic.types import MessageParam, ToolChoiceParam, ToolUnionParam
+    except ImportError:
+        raise ImportError(
+            'anthropic is required for AnthropicClient. '
+            'Install it with: pip install graphiti-core[anthropic]'
+        ) from None
+
+
 logger = logging.getLogger(__name__)
 
 AnthropicModel = Literal[
{graphiti_core-0.15.0 → graphiti_core-0.15.1}/graphiti_core/llm_client/gemini_client.py
RENAMED
@@ -17,10 +17,8 @@ limitations under the License.
 import json
 import logging
 import typing
-from typing import ClassVar
+from typing import TYPE_CHECKING, ClassVar
 
-from google import genai  # type: ignore
-from google.genai import types  # type: ignore
 from pydantic import BaseModel
 
 from ..prompts.models import Message
@@ -28,6 +26,21 @@ from .client import MULTILINGUAL_EXTRACTION_RESPONSES, LLMClient
 from .config import DEFAULT_MAX_TOKENS, LLMConfig, ModelSize
 from .errors import RateLimitError
 
+if TYPE_CHECKING:
+    from google import genai
+    from google.genai import types
+else:
+    try:
+        from google import genai
+        from google.genai import types
+    except ImportError:
+        # If gemini client is not installed, raise an ImportError
+        raise ImportError(
+            'google-genai is required for GeminiClient. '
+            'Install it with: pip install graphiti-core[google-genai]'
+        ) from None
+
+
 logger = logging.getLogger(__name__)
 
 DEFAULT_MODEL = 'gemini-2.5-flash'
{graphiti_core-0.15.0 → graphiti_core-0.15.1}/graphiti_core/llm_client/groq_client.py
RENAMED
@@ -17,10 +17,21 @@ limitations under the License.
 import json
 import logging
 import typing
+from typing import TYPE_CHECKING
 
-
-
-from groq
+if TYPE_CHECKING:
+    import groq
+    from groq import AsyncGroq
+    from groq.types.chat import ChatCompletionMessageParam
+else:
+    try:
+        import groq
+        from groq import AsyncGroq
+        from groq.types.chat import ChatCompletionMessageParam
+    except ImportError:
+        raise ImportError(
+            'groq is required for GroqClient. Install it with: pip install graphiti-core[groq]'
+        ) from None
 from pydantic import BaseModel
 
 from ..prompts.models import Message

{graphiti_core-0.15.0 → graphiti_core-0.15.1}/graphiti_core/nodes.py
RENAMED
@@ -542,12 +542,12 @@ class CommunityNode(Node):
 def get_episodic_node_from_record(record: Any) -> EpisodicNode:
     created_at = parse_db_date(record['created_at'])
     valid_at = parse_db_date(record['valid_at'])
-
+
     if created_at is None:
-        raise ValueError(f
+        raise ValueError(f'created_at cannot be None for episode {record.get("uuid", "unknown")}')
     if valid_at is None:
-        raise ValueError(f
-
+        raise ValueError(f'valid_at cannot be None for episode {record.get("uuid", "unknown")}')
+
     return EpisodicNode(
         content=record['content'],
         created_at=created_at,
{graphiti_core-0.15.0 → graphiti_core-0.15.1}/graphiti_core/search/search_utils.py
RENAMED
@@ -539,7 +539,8 @@ async def community_fulltext_search(
     comm.group_id AS group_id,
     comm.name AS name,
     comm.created_at AS created_at,
-    comm.summary AS summary
+    comm.summary AS summary,
+    comm.name_embedding AS name_embedding
     ORDER BY score DESC
     LIMIT $limit
     """
@@ -589,7 +590,8 @@ async def community_similarity_search(
     comm.group_id AS group_id,
     comm.name AS name,
     comm.created_at AS created_at,
-    comm.summary AS summary
+    comm.summary AS summary,
+    comm.name_embedding AS name_embedding
     ORDER BY score DESC
     LIMIT $limit
     """
{graphiti_core-0.15.0 → graphiti_core-0.15.1}/graphiti_core/utils/maintenance/graph_data_operations.py
RENAMED
@@ -140,7 +140,8 @@ async def retrieve_episodes(
     episodes = [
         EpisodicNode(
             content=record['content'],
-            created_at=parse_db_date(record['created_at'])
+            created_at=parse_db_date(record['created_at'])
+            or datetime.min.replace(tzinfo=timezone.utc),
             valid_at=parse_db_date(record['valid_at']) or datetime.min.replace(tzinfo=timezone.utc),
             uuid=record['uuid'],
             group_id=record['group_id'],
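
The fallback introduced here substitutes the earliest representable UTC datetime when `parse_db_date` returns `None`, rather than letting `EpisodicNode` validation fail on a missing timestamp. A minimal illustration of the sentinel value:

```python
# Minimal illustration of the `or` fallback used above (assumption: parse_db_date
# returns None when the stored value is missing).
from datetime import datetime, timezone

missing = None  # stand-in for parse_db_date(record['created_at']) returning None
created_at = missing or datetime.min.replace(tzinfo=timezone.utc)
print(created_at.isoformat())  # 0001-01-01T00:00:00+00:00
```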
{graphiti_core-0.15.0 → graphiti_core-0.15.1}/mcp_server/README.md
RENAMED
@@ -21,9 +21,9 @@ The Graphiti MCP server exposes the following key high-level functions of Graphi
 - **Group Management**: Organize and manage groups of related data with group_id filtering
 - **Graph Maintenance**: Clear the graph and rebuild indices
 
-## Quick Start
+## Quick Start
 
-
+### Clone the Graphiti GitHub repo
 
 ```bash
 git clone https://github.com/getzep/graphiti.git
@@ -35,7 +35,9 @@ or
 gh repo clone getzep/graphiti
 ```
 
-
+### For Claude Desktop and other `stdio` only clients
+
+1. Note the full path to this directory.
 
 ```
 cd graphiti && pwd
@@ -45,6 +47,18 @@ cd graphiti && pwd
 
 3. Configure Claude, Cursor, or other MCP client to use [Graphiti with a `stdio` transport](#integrating-with-mcp-clients). See the client documentation on where to find their MCP configuration files.
 
+### For Cursor and other `sse`-enabled clients
+
+1. Change directory to the `mcp_server` directory
+
+   `cd graphiti/mcp_server`
+
+2. Start the service using Docker Compose
+
+   `docker compose up`
+
+3. Point your MCP client to `http://localhost:8000/sse`
+
 ## Installation
 
 ### Prerequisites
@@ -163,6 +177,8 @@ The Docker Compose setup includes a Neo4j container with the following default c
 
 #### Running with Docker Compose
 
+A Graphiti MCP container is available at: `zepai/knowledge-graph-mcp`. The latest build of this container is used by the Compose setup below.
+
 Start the services using Docker Compose:
 
 ```bash