graphiti-core 0.14.0__tar.gz → 0.15.1__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of graphiti-core might be problematic; see the registry's advisory page for more details.

Files changed (183)
  1. graphiti_core-0.15.1/.github/workflows/mcp-server-docker.yml +73 -0
  2. graphiti_core-0.15.1/.github/workflows/typecheck.yml +42 -0
  3. graphiti_core-0.15.1/.github/workflows/unit_tests.yml +51 -0
  4. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/CONTRIBUTING.md +62 -1
  5. {graphiti_core-0.14.0/server → graphiti_core-0.15.1}/Makefile +2 -2
  6. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/PKG-INFO +66 -24
  7. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/README.md +57 -20
  8. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/examples/quickstart/quickstart_falkordb.py +18 -9
  9. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/graphiti_core/cross_encoder/bge_reranker_client.py +12 -2
  10. graphiti_core-0.15.1/graphiti_core/cross_encoder/gemini_reranker_client.py +158 -0
  11. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/graphiti_core/cross_encoder/openai_reranker_client.py +5 -3
  12. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/graphiti_core/driver/__init__.py +3 -1
  13. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/graphiti_core/driver/falkordb_driver.py +62 -25
  14. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/graphiti_core/driver/neo4j_driver.py +5 -3
  15. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/graphiti_core/embedder/gemini.py +14 -2
  16. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/graphiti_core/embedder/voyage.py +13 -2
  17. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/graphiti_core/graphiti.py +7 -2
  18. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/graphiti_core/helpers.py +3 -2
  19. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/graphiti_core/llm_client/anthropic_client.py +17 -4
  20. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/graphiti_core/llm_client/gemini_client.py +150 -25
  21. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/graphiti_core/llm_client/groq_client.py +14 -3
  22. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/graphiti_core/nodes.py +10 -2
  23. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/graphiti_core/search/search_filters.py +4 -5
  24. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/graphiti_core/search/search_utils.py +5 -9
  25. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/graphiti_core/utils/maintenance/community_operations.py +1 -1
  26. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/graphiti_core/utils/maintenance/edge_operations.py +1 -1
  27. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/graphiti_core/utils/maintenance/graph_data_operations.py +4 -5
  28. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/mcp_server/Dockerfile +1 -1
  29. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/mcp_server/README.md +19 -3
  30. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/mcp_server/docker-compose.yml +4 -3
  31. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/mcp_server/graphiti_mcp_server.py +41 -32
  32. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/mcp_server/pyproject.toml +2 -2
  33. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/mcp_server/uv.lock +43 -4
  34. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/poetry.lock +1508 -1327
  35. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/pyproject.toml +13 -12
  36. graphiti_core-0.15.1/pytest.ini +4 -0
  37. {graphiti_core-0.14.0 → graphiti_core-0.15.1/server}/Makefile +2 -2
  38. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/server/graph_service/config.py +1 -1
  39. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/server/pyproject.toml +6 -1
  40. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/server/uv.lock +24 -59
  41. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/signatures/version1/cla.json +32 -0
  42. graphiti_core-0.15.1/tests/cross_encoder/test_gemini_reranker_client.py +353 -0
  43. graphiti_core-0.15.1/tests/driver/__init__.py +1 -0
  44. graphiti_core-0.15.1/tests/driver/test_falkordb_driver.py +397 -0
  45. graphiti_core-0.15.1/tests/embedder/test_gemini.py +381 -0
  46. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/tests/helpers_test.py +1 -1
  47. graphiti_core-0.15.1/tests/llm_client/test_gemini_client.py +393 -0
  48. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/tests/test_entity_exclusion_int.py +105 -97
  49. graphiti_core-0.15.1/tests/test_graphiti_falkordb_int.py +164 -0
  50. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/tests/test_graphiti_int.py +6 -1
  51. graphiti_core-0.15.1/tests/test_node_falkordb_int.py +139 -0
  52. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/uv.lock +38 -63
  53. graphiti_core-0.14.0/.github/workflows/typecheck.yml +0 -47
  54. graphiti_core-0.14.0/.github/workflows/unit_tests.yml +0 -30
  55. graphiti_core-0.14.0/pytest.ini +0 -3
  56. graphiti_core-0.14.0/tests/embedder/test_gemini.py +0 -127
  57. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/.env.example +0 -0
  58. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/.github/dependabot.yml +0 -0
  59. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/.github/secret_scanning.yml +0 -0
  60. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/.github/workflows/cla.yml +0 -0
  61. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/.github/workflows/codeql.yml +0 -0
  62. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/.github/workflows/lint.yml +0 -0
  63. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/.github/workflows/release-graphiti-core.yml +0 -0
  64. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/.gitignore +0 -0
  65. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/CLAUDE.md +0 -0
  66. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/CODE_OF_CONDUCT.md +0 -0
  67. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/Dockerfile +0 -0
  68. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/LICENSE +0 -0
  69. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/SECURITY.md +0 -0
  70. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/Zep-CLA.md +0 -0
  71. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/conftest.py +0 -0
  72. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/depot.json +0 -0
  73. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/docker-compose.test.yml +0 -0
  74. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/docker-compose.yml +0 -0
  75. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/ellipsis.yaml +0 -0
  76. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/examples/data/manybirds_products.json +0 -0
  77. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/examples/ecommerce/runner.ipynb +0 -0
  78. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/examples/ecommerce/runner.py +0 -0
  79. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/examples/langgraph-agent/agent.ipynb +0 -0
  80. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/examples/langgraph-agent/tinybirds-jess.png +0 -0
  81. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/examples/podcast/podcast_runner.py +0 -0
  82. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/examples/podcast/podcast_transcript.txt +0 -0
  83. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/examples/podcast/transcript_parser.py +0 -0
  84. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/examples/quickstart/README.md +0 -0
  85. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/examples/quickstart/quickstart_neo4j.py +0 -0
  86. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/examples/quickstart/requirements.txt +0 -0
  87. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/examples/wizard_of_oz/parser.py +0 -0
  88. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/examples/wizard_of_oz/runner.py +0 -0
  89. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/examples/wizard_of_oz/woo.txt +0 -0
  90. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/graphiti_core/__init__.py +0 -0
  91. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/graphiti_core/cross_encoder/__init__.py +0 -0
  92. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/graphiti_core/cross_encoder/client.py +0 -0
  93. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/graphiti_core/driver/driver.py +0 -0
  94. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/graphiti_core/edges.py +0 -0
  95. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/graphiti_core/embedder/__init__.py +0 -0
  96. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/graphiti_core/embedder/azure_openai.py +0 -0
  97. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/graphiti_core/embedder/client.py +0 -0
  98. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/graphiti_core/embedder/openai.py +0 -0
  99. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/graphiti_core/errors.py +0 -0
  100. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/graphiti_core/graph_queries.py +0 -0
  101. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/graphiti_core/graphiti_types.py +0 -0
  102. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/graphiti_core/llm_client/__init__.py +0 -0
  103. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/graphiti_core/llm_client/azure_openai_client.py +0 -0
  104. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/graphiti_core/llm_client/client.py +0 -0
  105. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/graphiti_core/llm_client/config.py +0 -0
  106. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/graphiti_core/llm_client/errors.py +0 -0
  107. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/graphiti_core/llm_client/openai_base_client.py +0 -0
  108. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/graphiti_core/llm_client/openai_client.py +0 -0
  109. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/graphiti_core/llm_client/openai_generic_client.py +0 -0
  110. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/graphiti_core/llm_client/utils.py +0 -0
  111. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/graphiti_core/models/__init__.py +0 -0
  112. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/graphiti_core/models/edges/__init__.py +0 -0
  113. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/graphiti_core/models/edges/edge_db_queries.py +0 -0
  114. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/graphiti_core/models/nodes/__init__.py +0 -0
  115. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/graphiti_core/models/nodes/node_db_queries.py +0 -0
  116. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/graphiti_core/prompts/__init__.py +0 -0
  117. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/graphiti_core/prompts/dedupe_edges.py +0 -0
  118. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/graphiti_core/prompts/dedupe_nodes.py +0 -0
  119. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/graphiti_core/prompts/eval.py +0 -0
  120. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/graphiti_core/prompts/extract_edge_dates.py +0 -0
  121. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/graphiti_core/prompts/extract_edges.py +0 -0
  122. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/graphiti_core/prompts/extract_nodes.py +0 -0
  123. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/graphiti_core/prompts/invalidate_edges.py +0 -0
  124. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/graphiti_core/prompts/lib.py +0 -0
  125. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/graphiti_core/prompts/models.py +0 -0
  126. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/graphiti_core/prompts/prompt_helpers.py +0 -0
  127. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/graphiti_core/prompts/summarize_nodes.py +0 -0
  128. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/graphiti_core/py.typed +0 -0
  129. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/graphiti_core/search/__init__.py +0 -0
  130. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/graphiti_core/search/search.py +0 -0
  131. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/graphiti_core/search/search_config.py +0 -0
  132. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/graphiti_core/search/search_config_recipes.py +0 -0
  133. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/graphiti_core/search/search_helpers.py +0 -0
  134. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/graphiti_core/telemetry/__init__.py +0 -0
  135. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/graphiti_core/telemetry/telemetry.py +0 -0
  136. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/graphiti_core/utils/__init__.py +0 -0
  137. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/graphiti_core/utils/bulk_utils.py +0 -0
  138. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/graphiti_core/utils/datetime_utils.py +0 -0
  139. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/graphiti_core/utils/maintenance/__init__.py +0 -0
  140. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/graphiti_core/utils/maintenance/node_operations.py +0 -0
  141. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/graphiti_core/utils/maintenance/temporal_operations.py +0 -0
  142. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/graphiti_core/utils/maintenance/utils.py +0 -0
  143. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/graphiti_core/utils/ontology_utils/entity_types_utils.py +0 -0
  144. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/images/arxiv-screenshot.png +0 -0
  145. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/images/graphiti-graph-intro.gif +0 -0
  146. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/images/graphiti-intro-slides-stock-2.gif +0 -0
  147. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/images/simple_graph.svg +0 -0
  148. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/mcp_server/.env.example +0 -0
  149. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/mcp_server/.python-version +0 -0
  150. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/mcp_server/cursor_rules.md +0 -0
  151. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/mcp_server/mcp_config_sse_example.json +0 -0
  152. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/mcp_server/mcp_config_stdio_example.json +0 -0
  153. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/py.typed +0 -0
  154. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/server/.env.example +0 -0
  155. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/server/README.md +0 -0
  156. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/server/graph_service/__init__.py +0 -0
  157. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/server/graph_service/dto/__init__.py +0 -0
  158. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/server/graph_service/dto/common.py +0 -0
  159. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/server/graph_service/dto/ingest.py +0 -0
  160. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/server/graph_service/dto/retrieve.py +0 -0
  161. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/server/graph_service/main.py +0 -0
  162. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/server/graph_service/routers/__init__.py +0 -0
  163. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/server/graph_service/routers/ingest.py +0 -0
  164. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/server/graph_service/routers/retrieve.py +0 -0
  165. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/server/graph_service/zep_graphiti.py +0 -0
  166. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/tests/cross_encoder/test_bge_reranker_client.py +0 -0
  167. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/tests/embedder/embedder_fixtures.py +0 -0
  168. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/tests/embedder/test_openai.py +0 -0
  169. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/tests/embedder/test_voyage.py +0 -0
  170. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/tests/evals/data/longmemeval_data/README.md +0 -0
  171. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/tests/evals/data/longmemeval_data/longmemeval_oracle.json +0 -0
  172. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/tests/evals/eval_cli.py +0 -0
  173. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/tests/evals/eval_e2e_graph_building.py +0 -0
  174. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/tests/evals/pytest.ini +0 -0
  175. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/tests/evals/utils.py +0 -0
  176. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/tests/llm_client/test_anthropic_client.py +0 -0
  177. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/tests/llm_client/test_anthropic_client_int.py +0 -0
  178. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/tests/llm_client/test_client.py +0 -0
  179. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/tests/llm_client/test_errors.py +0 -0
  180. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/tests/test_node_int.py +0 -0
  181. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/tests/utils/maintenance/test_edge_operations.py +0 -0
  182. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/tests/utils/maintenance/test_temporal_operations_int.py +0 -0
  183. {graphiti_core-0.14.0 → graphiti_core-0.15.1}/tests/utils/search/search_utils_test.py +0 -0
@@ -0,0 +1,73 @@
1
+ name: Build and Push MCP Server Docker Image
2
+
3
+ on:
4
+ push:
5
+ paths:
6
+ - "mcp_server/pyproject.toml"
7
+ branches:
8
+ - main
9
+ pull_request:
10
+ paths:
11
+ - "mcp_server/pyproject.toml"
12
+ branches:
13
+ - main
14
+ workflow_dispatch:
15
+ inputs:
16
+ push_image:
17
+ description: "Push image to registry (unchecked for testing)"
18
+ required: false
19
+ default: false
20
+ type: boolean
21
+
22
+ env:
23
+ REGISTRY: docker.io
24
+ IMAGE_NAME: zepai/knowledge-graph-mcp
25
+
26
+ jobs:
27
+ build-and-push:
28
+ runs-on: depot-ubuntu-24.04-small
29
+ environment: development
30
+ permissions:
31
+ contents: read
32
+ id-token: write
33
+ steps:
34
+ - name: Checkout repository
35
+ uses: actions/checkout@v4
36
+
37
+ - name: Extract version from pyproject.toml
38
+ id: version
39
+ run: |
40
+ VERSION=$(python -c "import tomllib; print(tomllib.load(open('mcp_server/pyproject.toml', 'rb'))['project']['version'])")
41
+ echo "version=$VERSION" >> $GITHUB_OUTPUT
42
+ echo "tag=v$VERSION" >> $GITHUB_OUTPUT
43
+ - name: Log in to Docker Hub
44
+ if: github.event_name != 'pull_request' && (github.event_name != 'workflow_dispatch' || inputs.push_image)
45
+ uses: docker/login-action@v3
46
+ with:
47
+ registry: ${{ env.REGISTRY }}
48
+ username: ${{ secrets.DOCKERHUB_USERNAME }}
49
+ password: ${{ secrets.DOCKERHUB_TOKEN }}
50
+
51
+ - name: Set up Depot CLI
52
+ uses: depot/setup-action@v1
53
+
54
+ - name: Extract metadata
55
+ id: meta
56
+ uses: docker/metadata-action@v5
57
+ with:
58
+ images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
59
+ tags: |
60
+ type=ref,event=branch
61
+ type=ref,event=pr
62
+ type=raw,value=${{ steps.version.outputs.tag }}
63
+ type=raw,value=latest,enable={{is_default_branch}}
64
+
65
+ - name: Depot build and push image
66
+ uses: depot/build-push-action@v1
67
+ with:
68
+ project: v9jv1mlpwc
69
+ context: ./mcp_server
70
+ platforms: linux/amd64,linux/arm64
71
+ push: ${{ github.event_name != 'pull_request' && (github.event_name != 'workflow_dispatch' || inputs.push_image) }}
72
+ tags: ${{ steps.meta.outputs.tags }}
73
+ labels: ${{ steps.meta.outputs.labels }}
@@ -0,0 +1,42 @@
1
+ name: Pyright Type Check
2
+
3
+ permissions:
4
+ contents: read
5
+
6
+ on:
7
+ push:
8
+ branches: ["main"]
9
+ pull_request:
10
+ branches: ["main"]
11
+
12
+ jobs:
13
+ pyright:
14
+ runs-on: depot-ubuntu-22.04
15
+ environment: development
16
+ steps:
17
+ - uses: actions/checkout@v4
18
+ - name: Set up Python
19
+ id: setup-python
20
+ uses: actions/setup-python@v5
21
+ with:
22
+ python-version: "3.10"
23
+ - name: Install uv
24
+ uses: astral-sh/setup-uv@v3
25
+ with:
26
+ version: "latest"
27
+ - name: Install dependencies
28
+ run: uv sync --all-extras
29
+ - name: Run Pyright for graphiti-core
30
+ shell: bash
31
+ run: |
32
+ uv run pyright ./graphiti_core
33
+ - name: Install graph-service dependencies
34
+ shell: bash
35
+ run: |
36
+ cd server
37
+ uv sync --all-extras
38
+ - name: Run Pyright for graph-service
39
+ shell: bash
40
+ run: |
41
+ cd server
42
+ uv run pyright .
@@ -0,0 +1,51 @@
1
+ name: Unit Tests
2
+
3
+ on:
4
+ push:
5
+ branches: [main]
6
+ pull_request:
7
+ branches: [main]
8
+
9
+ permissions:
10
+ contents: read
11
+
12
+ jobs:
13
+ test:
14
+ runs-on: depot-ubuntu-22.04
15
+ environment:
16
+ name: development
17
+ services:
18
+ falkordb:
19
+ image: falkordb/falkordb:latest
20
+ ports:
21
+ - 6379:6379
22
+ options: --health-cmd "redis-cli ping" --health-interval 10s --health-timeout 5s --health-retries 5
23
+ steps:
24
+ - uses: actions/checkout@v4
25
+ - name: Set up Python
26
+ uses: actions/setup-python@v5
27
+ with:
28
+ python-version: "3.10"
29
+ - name: Install uv
30
+ uses: astral-sh/setup-uv@v3
31
+ with:
32
+ version: "latest"
33
+ - name: Install redis-cli for FalkorDB health check
34
+ run: sudo apt-get update && sudo apt-get install -y redis-tools
35
+ - name: Install dependencies
36
+ run: uv sync --all-extras
37
+ - name: Run non-integration tests
38
+ env:
39
+ PYTHONPATH: ${{ github.workspace }}
40
+ run: |
41
+ uv run pytest -m "not integration"
42
+ - name: Wait for FalkorDB
43
+ run: |
44
+ timeout 60 bash -c 'until redis-cli -h localhost -p 6379 ping; do sleep 1; done'
45
+ - name: Run FalkorDB integration tests
46
+ env:
47
+ PYTHONPATH: ${{ github.workspace }}
48
+ FALKORDB_HOST: localhost
49
+ FALKORDB_PORT: 6379
50
+ run: |
51
+ uv run pytest tests/driver/test_falkordb_driver.py
@@ -116,7 +116,7 @@ Once you've found an issue tagged with "good first issue" or "help wanted," or p
116
116
  We use several tools to maintain code quality:
117
117
 
118
118
  - Ruff for linting and formatting
119
- - Mypy for static type checking
119
+ - Pyright for static type checking
120
120
  - Pytest for testing
121
121
 
122
122
  Before submitting a pull request, please run:
@@ -127,6 +127,67 @@ make check
127
127
 
128
128
  This command will format your code, run linting checks, and execute tests.
129
129
 
130
+ ## Third-Party Integrations
131
+
132
+ When contributing integrations for third-party services (LLM providers, embedding services, databases, etc.), please follow these patterns:
133
+
134
+ ### Optional Dependencies
135
+
136
+ All third-party integrations must be optional dependencies to keep the core library lightweight. Follow this pattern:
137
+
138
+ 1. **Add to `pyproject.toml`**: Define your dependency as an optional extra AND include it in the dev extra:
139
+ ```toml
140
+ [project.optional-dependencies]
141
+ your-service = ["your-package>=1.0.0"]
142
+ dev = [
143
+ # ... existing dev dependencies
144
+ "your-package>=1.0.0", # Include all optional extras here
145
+ # ... other dependencies
146
+ ]
147
+ ```
148
+
149
+ 2. **Use TYPE_CHECKING pattern**: In your integration module, import dependencies conditionally:
150
+ ```python
151
+ from typing import TYPE_CHECKING
152
+
153
+ if TYPE_CHECKING:
154
+ import your_package
155
+ from your_package import SomeType
156
+ else:
157
+ try:
158
+ import your_package
159
+ from your_package import SomeType
160
+ except ImportError:
161
+ raise ImportError(
162
+ 'your-package is required for YourServiceClient. '
163
+ 'Install it with: pip install graphiti-core[your-service]'
164
+ ) from None
165
+ ```
166
+
167
+ 3. **Benefits of this pattern**:
168
+ - Fast startup times (no import overhead during type checking)
169
+ - Clear error messages with installation instructions
170
+ - Proper type hints for development
171
+ - Consistent user experience
172
+
173
+ 4. **Do NOT**:
174
+ - Add optional imports to `__init__.py` files
175
+ - Use direct imports without error handling
176
+ - Include optional dependencies in the main `dependencies` list
177
+
178
+ ### Integration Structure
179
+
180
+ - Place LLM clients in `graphiti_core/llm_client/`
181
+ - Place embedding clients in `graphiti_core/embedder/`
182
+ - Place database drivers in `graphiti_core/driver/`
183
+ - Follow existing naming conventions (e.g., `your_service_client.py`)
184
+
185
+ ### Testing
186
+
187
+ - Add comprehensive tests in the appropriate `tests/` subdirectory
188
+ - Mark integration tests with `_int` suffix if they require external services
189
+ - Include both unit tests and integration tests where applicable
190
+
130
191
  # Questions?
131
192
 
132
193
  Stuck on a contribution or have a half-formed idea? Come say hello in our [Discord server](https://discord.com/invite/W8Kw6bsgXQ). Whether you're ready to contribute or just want to learn more, we're happy to have you! It's faster than GitHub issues and you'll find both maintainers and fellow contributors ready to help.
@@ -5,7 +5,7 @@ PYTHON = python3
5
5
  UV = uv
6
6
  PYTEST = $(UV) run pytest
7
7
  RUFF = $(UV) run ruff
8
- MYPY = $(UV) run mypy
8
+ PYRIGHT = $(UV) run pyright
9
9
 
10
10
  # Default target
11
11
  all: format lint test
@@ -22,7 +22,7 @@ format:
22
22
  # Lint code
23
23
  lint:
24
24
  $(RUFF) check
25
- $(MYPY) . --show-column-numbers --show-error-codes --pretty
25
+ $(PYRIGHT) ./graphiti_core
26
26
 
27
27
  # Run tests
28
28
  test:
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: graphiti-core
3
- Version: 0.14.0
3
+ Version: 0.15.1
4
4
  Summary: A temporal graph building library
5
5
  Project-URL: Homepage, https://help.getzep.com/graphiti/graphiti/overview
6
6
  Project-URL: Repository, https://github.com/getzep/graphiti
@@ -21,6 +21,7 @@ Requires-Dist: anthropic>=0.49.0; extra == 'anthropic'
21
21
  Provides-Extra: dev
22
22
  Requires-Dist: anthropic>=0.49.0; extra == 'dev'
23
23
  Requires-Dist: diskcache-stubs>=5.6.3.6.20240818; extra == 'dev'
24
+ Requires-Dist: falkordb<2.0.0,>=1.1.2; extra == 'dev'
24
25
  Requires-Dist: google-genai>=1.8.0; extra == 'dev'
25
26
  Requires-Dist: groq>=0.2.0; extra == 'dev'
26
27
  Requires-Dist: ipykernel>=6.29.5; extra == 'dev'
@@ -29,7 +30,7 @@ Requires-Dist: langchain-anthropic>=0.2.4; extra == 'dev'
29
30
  Requires-Dist: langchain-openai>=0.2.6; extra == 'dev'
30
31
  Requires-Dist: langgraph>=0.2.15; extra == 'dev'
31
32
  Requires-Dist: langsmith>=0.1.108; extra == 'dev'
32
- Requires-Dist: mypy>=1.11.1; extra == 'dev'
33
+ Requires-Dist: pyright>=1.1.380; extra == 'dev'
33
34
  Requires-Dist: pytest-asyncio>=0.24.0; extra == 'dev'
34
35
  Requires-Dist: pytest-xdist>=3.6.1; extra == 'dev'
35
36
  Requires-Dist: pytest>=8.3.3; extra == 'dev'
@@ -37,12 +38,16 @@ Requires-Dist: ruff>=0.7.1; extra == 'dev'
37
38
  Requires-Dist: sentence-transformers>=3.2.1; extra == 'dev'
38
39
  Requires-Dist: transformers>=4.45.2; extra == 'dev'
39
40
  Requires-Dist: voyageai>=0.2.3; extra == 'dev'
40
- Provides-Extra: falkord-db
41
- Requires-Dist: falkordb<2.0.0,>=1.1.2; extra == 'falkord-db'
41
+ Provides-Extra: falkordb
42
+ Requires-Dist: falkordb<2.0.0,>=1.1.2; extra == 'falkordb'
42
43
  Provides-Extra: google-genai
43
44
  Requires-Dist: google-genai>=1.8.0; extra == 'google-genai'
44
45
  Provides-Extra: groq
45
46
  Requires-Dist: groq>=0.2.0; extra == 'groq'
47
+ Provides-Extra: sentence-transformers
48
+ Requires-Dist: sentence-transformers>=3.2.1; extra == 'sentence-transformers'
49
+ Provides-Extra: voyageai
50
+ Requires-Dist: voyageai>=0.2.3; extra == 'voyageai'
46
51
  Description-Content-Type: text/markdown
47
52
 
48
53
  <p align="center">
@@ -153,7 +158,7 @@ Requirements:
153
158
 
154
159
  - Python 3.10 or higher
155
160
  - Neo4j 5.26 / FalkorDB 1.1.2 or higher (serves as the embeddings storage backend)
156
- - OpenAI API key (for LLM inference and embedding)
161
+ - OpenAI API key (Graphiti defaults to OpenAI for LLM inference and embedding)
157
162
 
158
163
  > [!IMPORTANT]
159
164
  > Graphiti works best with LLM services that support Structured Output (such as OpenAI and Gemini).
@@ -167,6 +172,12 @@ Optional:
167
172
  > [!TIP]
168
173
  > The simplest way to install Neo4j is via [Neo4j Desktop](https://neo4j.com/download/). It provides a user-friendly
169
174
  > interface to manage Neo4j instances and databases.
175
+ > Alternatively, you can use FalkorDB on-premises via Docker and instantly start with the quickstart example:
176
+
177
+ ```bash
178
+ docker run -p 6379:6379 -p 3000:3000 -it --rm falkordb/falkordb:latest
179
+
180
+ ```
170
181
 
171
182
  ```bash
172
183
  pip install graphiti-core
@@ -178,7 +189,18 @@ or
178
189
  uv add graphiti-core
179
190
  ```
180
191
 
181
- You can also install optional LLM providers as extras:
192
+ ### Installing with FalkorDB Support
193
+
194
+ If you plan to use FalkorDB as your graph database backend, install with the FalkorDB extra:
195
+
196
+ ```bash
197
+ pip install graphiti-core[falkordb]
198
+
199
+ # or with uv
200
+ uv add graphiti-core[falkordb]
201
+ ```
202
+
203
+ ### You can also install optional LLM providers as extras:
182
204
 
183
205
  ```bash
184
206
  # Install with Anthropic support
@@ -192,18 +214,21 @@ pip install graphiti-core[google-genai]
192
214
 
193
215
  # Install with multiple providers
194
216
  pip install graphiti-core[anthropic,groq,google-genai]
217
+
218
+ # Install with FalkorDB and LLM providers
219
+ pip install graphiti-core[falkordb,anthropic,google-genai]
195
220
  ```
196
221
 
197
222
  ## Quick Start
198
223
 
199
224
  > [!IMPORTANT]
200
- > Graphiti uses OpenAI for LLM inference and embedding. Ensure that an `OPENAI_API_KEY` is set in your environment.
225
+ > Graphiti defaults to using OpenAI for LLM inference and embedding. Ensure that an `OPENAI_API_KEY` is set in your environment.
201
226
  > Support for Anthropic and Groq LLM inferences is available, too. Other LLM providers may be supported via OpenAI
202
227
  > compatible APIs.
203
228
 
204
229
  For a complete working example, see the [Quickstart Example](./examples/quickstart/README.md) in the examples directory. The quickstart demonstrates:
205
230
 
206
- 1. Connecting to a Neo4j database
231
+ 1. Connecting to a Neo4j or FalkorDB database
207
232
  2. Initializing Graphiti indices and constraints
208
233
  3. Adding episodes to the graph (both text and structured JSON)
209
234
  4. Searching for relationships (edges) using hybrid search
@@ -247,7 +272,7 @@ as such this feature is off by default.
247
272
 
248
273
  ## Using Graphiti with Azure OpenAI
249
274
 
250
- Graphiti supports Azure OpenAI for both LLM inference and embeddings. To use Azure OpenAI, you'll need to configure both the LLM client and embedder with your Azure OpenAI credentials.
275
+ Graphiti supports Azure OpenAI for both LLM inference and embeddings. Azure deployments often require different endpoints for LLM and embedding services, and separate deployments for default and small models.
251
276
 
252
277
  ```python
253
278
  from openai import AsyncAzureOpenAI
@@ -256,19 +281,26 @@ from graphiti_core.llm_client import LLMConfig, OpenAIClient
256
281
  from graphiti_core.embedder.openai import OpenAIEmbedder, OpenAIEmbedderConfig
257
282
  from graphiti_core.cross_encoder.openai_reranker_client import OpenAIRerankerClient
258
283
 
259
- # Azure OpenAI configuration
284
+ # Azure OpenAI configuration - use separate endpoints for different services
260
285
  api_key = "<your-api-key>"
261
286
  api_version = "<your-api-version>"
262
- azure_endpoint = "<your-azure-endpoint>"
287
+ llm_endpoint = "<your-llm-endpoint>" # e.g., "https://your-llm-resource.openai.azure.com/"
288
+ embedding_endpoint = "<your-embedding-endpoint>" # e.g., "https://your-embedding-resource.openai.azure.com/"
263
289
 
264
- # Create Azure OpenAI client for LLM
265
- azure_openai_client = AsyncAzureOpenAI(
290
+ # Create separate Azure OpenAI clients for different services
291
+ llm_client_azure = AsyncAzureOpenAI(
266
292
  api_key=api_key,
267
293
  api_version=api_version,
268
- azure_endpoint=azure_endpoint
294
+ azure_endpoint=llm_endpoint
269
295
  )
270
296
 
271
- # Create LLM Config with your Azure deployed model names
297
+ embedding_client_azure = AsyncAzureOpenAI(
298
+ api_key=api_key,
299
+ api_version=api_version,
300
+ azure_endpoint=embedding_endpoint
301
+ )
302
+
303
+ # Create LLM Config with your Azure deployment names
272
304
  azure_llm_config = LLMConfig(
273
305
  small_model="gpt-4.1-nano",
274
306
  model="gpt-4.1-mini",
@@ -281,29 +313,30 @@ graphiti = Graphiti(
281
313
  "password",
282
314
  llm_client=OpenAIClient(
283
315
  llm_config=azure_llm_config,
284
- client=azure_openai_client
316
+ client=llm_client_azure
285
317
  ),
286
318
  embedder=OpenAIEmbedder(
287
319
  config=OpenAIEmbedderConfig(
288
- embedding_model="text-embedding-3-small" # Use your Azure deployed embedding model name
320
+ embedding_model="text-embedding-3-small-deployment" # Your Azure embedding deployment name
289
321
  ),
290
- client=azure_openai_client
322
+ client=embedding_client_azure
291
323
  ),
292
- # Optional: Configure the OpenAI cross encoder with Azure OpenAI
293
324
  cross_encoder=OpenAIRerankerClient(
294
- llm_config=azure_llm_config,
295
- client=azure_openai_client
325
+ llm_config=LLMConfig(
326
+ model=azure_llm_config.small_model # Use small model for reranking
327
+ ),
328
+ client=llm_client_azure
296
329
  )
297
330
  )
298
331
 
299
332
  # Now you can use Graphiti with Azure OpenAI
300
333
  ```
301
334
 
302
- Make sure to replace the placeholder values with your actual Azure OpenAI credentials and specify the correct embedding model name that's deployed in your Azure OpenAI service.
335
+ Make sure to replace the placeholder values with your actual Azure OpenAI credentials and deployment names that match your Azure OpenAI service configuration.
303
336
 
304
337
  ## Using Graphiti with Google Gemini
305
338
 
306
- Graphiti supports Google's Gemini models for both LLM inference and embeddings. To use Gemini, you'll need to configure both the LLM client and embedder with your Google API key.
339
+ Graphiti supports Google's Gemini models for LLM inference, embeddings, and cross-encoding/reranking. To use Gemini, you'll need to configure the LLM client, embedder, and cross-encoder with your Google API key.
307
340
 
308
341
  Install Graphiti:
309
342
 
@@ -319,6 +352,7 @@ pip install "graphiti-core[google-genai]"
319
352
  from graphiti_core import Graphiti
320
353
  from graphiti_core.llm_client.gemini_client import GeminiClient, LLMConfig
321
354
  from graphiti_core.embedder.gemini import GeminiEmbedder, GeminiEmbedderConfig
355
+ from graphiti_core.cross_encoder.gemini_reranker_client import GeminiRerankerClient
322
356
 
323
357
  # Google API key configuration
324
358
  api_key = "<your-google-api-key>"
@@ -339,12 +373,20 @@ graphiti = Graphiti(
339
373
  api_key=api_key,
340
374
  embedding_model="embedding-001"
341
375
  )
376
+ ),
377
+ cross_encoder=GeminiRerankerClient(
378
+ config=LLMConfig(
379
+ api_key=api_key,
380
+ model="gemini-2.5-flash-lite-preview-06-17"
381
+ )
342
382
  )
343
383
  )
344
384
 
345
- # Now you can use Graphiti with Google Gemini
385
+ # Now you can use Graphiti with Google Gemini for all components
346
386
  ```
347
387
 
388
+ The Gemini reranker uses the `gemini-2.5-flash-lite-preview-06-17` model by default, which is optimized for cost-effective and low-latency classification tasks. It uses the same boolean classification approach as the OpenAI reranker, leveraging Gemini's log probabilities feature to rank passage relevance.
389
+
348
390
  ## Using Graphiti with Ollama (Local LLM)
349
391
 
350
392
  Graphiti supports Ollama for running local LLMs and embedding models via Ollama's OpenAI-compatible API. This is ideal for privacy-focused applications or when you want to avoid API costs.
@@ -106,7 +106,7 @@ Requirements:
106
106
 
107
107
  - Python 3.10 or higher
108
108
  - Neo4j 5.26 / FalkorDB 1.1.2 or higher (serves as the embeddings storage backend)
109
- - OpenAI API key (for LLM inference and embedding)
109
+ - OpenAI API key (Graphiti defaults to OpenAI for LLM inference and embedding)
110
110
 
111
111
  > [!IMPORTANT]
112
112
  > Graphiti works best with LLM services that support Structured Output (such as OpenAI and Gemini).
@@ -120,6 +120,12 @@ Optional:
120
120
  > [!TIP]
121
121
  > The simplest way to install Neo4j is via [Neo4j Desktop](https://neo4j.com/download/). It provides a user-friendly
122
122
  > interface to manage Neo4j instances and databases.
123
+ > Alternatively, you can use FalkorDB on-premises via Docker and instantly start with the quickstart example:
124
+
125
+ ```bash
126
+ docker run -p 6379:6379 -p 3000:3000 -it --rm falkordb/falkordb:latest
127
+ ```
123
129
 
124
130
  ```bash
125
131
  pip install graphiti-core
@@ -131,7 +137,18 @@ or
131
137
  uv add graphiti-core
132
138
  ```
133
139
 
134
- You can also install optional LLM providers as extras:
140
+ ### Installing with FalkorDB Support
141
+
142
+ If you plan to use FalkorDB as your graph database backend, install with the FalkorDB extra:
143
+
144
+ ```bash
145
+ pip install "graphiti-core[falkordb]"
146
+
147
+ # or with uv
148
+ uv add "graphiti-core[falkordb]"
149
+ ```
150
+
151
+ ### Installing Optional LLM Providers
135
152
 
136
153
  ```bash
137
154
  # Install with Anthropic support
@@ -145,18 +162,21 @@ pip install graphiti-core[google-genai]
145
162
 
146
163
  # Install with multiple providers
147
164
  pip install graphiti-core[anthropic,groq,google-genai]
165
+
166
+ # Install with FalkorDB and LLM providers
167
+ pip install graphiti-core[falkordb,anthropic,google-genai]
148
168
  ```
149
169
 
150
170
  ## Quick Start
151
171
 
152
172
  > [!IMPORTANT]
153
- > Graphiti uses OpenAI for LLM inference and embedding. Ensure that an `OPENAI_API_KEY` is set in your environment.
173
+ > Graphiti defaults to using OpenAI for LLM inference and embedding. Ensure that an `OPENAI_API_KEY` is set in your environment.
154
174
  > Support for Anthropic and Groq LLM inferences is available, too. Other LLM providers may be supported via OpenAI
155
175
  > compatible APIs.
156
176
 
157
177
  For a complete working example, see the [Quickstart Example](./examples/quickstart/README.md) in the examples directory. The quickstart demonstrates:
158
178
 
159
- 1. Connecting to a Neo4j database
179
+ 1. Connecting to a Neo4j or FalkorDB database
160
180
  2. Initializing Graphiti indices and constraints
161
181
  3. Adding episodes to the graph (both text and structured JSON)
162
182
  4. Searching for relationships (edges) using hybrid search
@@ -200,7 +220,7 @@ as such this feature is off by default.
200
220
 
201
221
  ## Using Graphiti with Azure OpenAI
202
222
 
203
- Graphiti supports Azure OpenAI for both LLM inference and embeddings. To use Azure OpenAI, you'll need to configure both the LLM client and embedder with your Azure OpenAI credentials.
223
+ Graphiti supports Azure OpenAI for both LLM inference and embeddings. Azure deployments often require different endpoints for LLM and embedding services, and separate deployments for default and small models.
204
224
 
205
225
  ```python
206
226
  from openai import AsyncAzureOpenAI
@@ -209,19 +229,26 @@ from graphiti_core.llm_client import LLMConfig, OpenAIClient
209
229
  from graphiti_core.embedder.openai import OpenAIEmbedder, OpenAIEmbedderConfig
210
230
  from graphiti_core.cross_encoder.openai_reranker_client import OpenAIRerankerClient
211
231
 
212
- # Azure OpenAI configuration
232
+ # Azure OpenAI configuration - use separate endpoints for different services
213
233
  api_key = "<your-api-key>"
214
234
  api_version = "<your-api-version>"
215
- azure_endpoint = "<your-azure-endpoint>"
235
+ llm_endpoint = "<your-llm-endpoint>" # e.g., "https://your-llm-resource.openai.azure.com/"
236
+ embedding_endpoint = "<your-embedding-endpoint>" # e.g., "https://your-embedding-resource.openai.azure.com/"
216
237
 
217
- # Create Azure OpenAI client for LLM
218
- azure_openai_client = AsyncAzureOpenAI(
238
+ # Create separate Azure OpenAI clients for different services
239
+ llm_client_azure = AsyncAzureOpenAI(
219
240
  api_key=api_key,
220
241
  api_version=api_version,
221
- azure_endpoint=azure_endpoint
242
+ azure_endpoint=llm_endpoint
222
243
  )
223
244
 
224
- # Create LLM Config with your Azure deployed model names
245
+ embedding_client_azure = AsyncAzureOpenAI(
246
+ api_key=api_key,
247
+ api_version=api_version,
248
+ azure_endpoint=embedding_endpoint
249
+ )
250
+
251
+ # Create LLM Config with your Azure deployment names
225
252
  azure_llm_config = LLMConfig(
226
253
  small_model="gpt-4.1-nano",
227
254
  model="gpt-4.1-mini",
@@ -234,29 +261,30 @@ graphiti = Graphiti(
234
261
  "password",
235
262
  llm_client=OpenAIClient(
236
263
  llm_config=azure_llm_config,
237
- client=azure_openai_client
264
+ client=llm_client_azure
238
265
  ),
239
266
  embedder=OpenAIEmbedder(
240
267
  config=OpenAIEmbedderConfig(
241
- embedding_model="text-embedding-3-small" # Use your Azure deployed embedding model name
268
+ embedding_model="text-embedding-3-small-deployment" # Your Azure embedding deployment name
242
269
  ),
243
- client=azure_openai_client
270
+ client=embedding_client_azure
244
271
  ),
245
- # Optional: Configure the OpenAI cross encoder with Azure OpenAI
246
272
  cross_encoder=OpenAIRerankerClient(
247
- llm_config=azure_llm_config,
248
- client=azure_openai_client
273
+ llm_config=LLMConfig(
274
+ model=azure_llm_config.small_model # Use small model for reranking
275
+ ),
276
+ client=llm_client_azure
249
277
  )
250
278
  )
251
279
 
252
280
  # Now you can use Graphiti with Azure OpenAI
253
281
  ```
254
282
 
255
- Make sure to replace the placeholder values with your actual Azure OpenAI credentials and specify the correct embedding model name that's deployed in your Azure OpenAI service.
283
+ Make sure to replace the placeholder values with your actual Azure OpenAI credentials and deployment names that match your Azure OpenAI service configuration.
256
284
 
257
285
  ## Using Graphiti with Google Gemini
258
286
 
259
- Graphiti supports Google's Gemini models for both LLM inference and embeddings. To use Gemini, you'll need to configure both the LLM client and embedder with your Google API key.
287
+ Graphiti supports Google's Gemini models for LLM inference, embeddings, and cross-encoding/reranking. To use Gemini, you'll need to configure the LLM client, embedder, and cross-encoder with your Google API key.
260
288
 
261
289
  Install Graphiti:
262
290
 
@@ -272,6 +300,7 @@ pip install "graphiti-core[google-genai]"
272
300
  from graphiti_core import Graphiti
273
301
  from graphiti_core.llm_client.gemini_client import GeminiClient, LLMConfig
274
302
  from graphiti_core.embedder.gemini import GeminiEmbedder, GeminiEmbedderConfig
303
+ from graphiti_core.cross_encoder.gemini_reranker_client import GeminiRerankerClient
275
304
 
276
305
  # Google API key configuration
277
306
  api_key = "<your-google-api-key>"
@@ -292,12 +321,20 @@ graphiti = Graphiti(
292
321
  api_key=api_key,
293
322
  embedding_model="embedding-001"
294
323
  )
324
+ ),
325
+ cross_encoder=GeminiRerankerClient(
326
+ config=LLMConfig(
327
+ api_key=api_key,
328
+ model="gemini-2.5-flash-lite-preview-06-17"
329
+ )
295
330
  )
296
331
  )
297
332
 
298
- # Now you can use Graphiti with Google Gemini
333
+ # Now you can use Graphiti with Google Gemini for all components
299
334
  ```
300
335
 
336
+ The Gemini reranker uses the `gemini-2.5-flash-lite-preview-06-17` model by default, which is optimized for cost-effective and low-latency classification tasks. It uses the same boolean classification approach as the OpenAI reranker, leveraging Gemini's log probabilities feature to rank passage relevance.
337
+
301
338
  ## Using Graphiti with Ollama (Local LLM)
302
339
 
303
340
  Graphiti supports Ollama for running local LLMs and embedding models via Ollama's OpenAI-compatible API. This is ideal for privacy-focused applications or when you want to avoid API costs.