graphiti-core 0.12.0rc1__py3-none-any.whl → 0.24.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (68) hide show
  1. graphiti_core/cross_encoder/bge_reranker_client.py +12 -2
  2. graphiti_core/cross_encoder/gemini_reranker_client.py +161 -0
  3. graphiti_core/cross_encoder/openai_reranker_client.py +7 -5
  4. graphiti_core/decorators.py +110 -0
  5. graphiti_core/driver/__init__.py +19 -0
  6. graphiti_core/driver/driver.py +124 -0
  7. graphiti_core/driver/falkordb_driver.py +362 -0
  8. graphiti_core/driver/graph_operations/graph_operations.py +191 -0
  9. graphiti_core/driver/kuzu_driver.py +182 -0
  10. graphiti_core/driver/neo4j_driver.py +117 -0
  11. graphiti_core/driver/neptune_driver.py +305 -0
  12. graphiti_core/driver/search_interface/search_interface.py +89 -0
  13. graphiti_core/edges.py +287 -172
  14. graphiti_core/embedder/azure_openai.py +71 -0
  15. graphiti_core/embedder/client.py +2 -1
  16. graphiti_core/embedder/gemini.py +116 -22
  17. graphiti_core/embedder/voyage.py +13 -2
  18. graphiti_core/errors.py +8 -0
  19. graphiti_core/graph_queries.py +162 -0
  20. graphiti_core/graphiti.py +705 -193
  21. graphiti_core/graphiti_types.py +4 -2
  22. graphiti_core/helpers.py +87 -10
  23. graphiti_core/llm_client/__init__.py +16 -0
  24. graphiti_core/llm_client/anthropic_client.py +159 -56
  25. graphiti_core/llm_client/azure_openai_client.py +115 -0
  26. graphiti_core/llm_client/client.py +98 -21
  27. graphiti_core/llm_client/config.py +1 -1
  28. graphiti_core/llm_client/gemini_client.py +290 -41
  29. graphiti_core/llm_client/groq_client.py +14 -3
  30. graphiti_core/llm_client/openai_base_client.py +261 -0
  31. graphiti_core/llm_client/openai_client.py +56 -132
  32. graphiti_core/llm_client/openai_generic_client.py +91 -56
  33. graphiti_core/models/edges/edge_db_queries.py +259 -35
  34. graphiti_core/models/nodes/node_db_queries.py +311 -32
  35. graphiti_core/nodes.py +420 -205
  36. graphiti_core/prompts/dedupe_edges.py +46 -32
  37. graphiti_core/prompts/dedupe_nodes.py +67 -42
  38. graphiti_core/prompts/eval.py +4 -4
  39. graphiti_core/prompts/extract_edges.py +27 -16
  40. graphiti_core/prompts/extract_nodes.py +74 -31
  41. graphiti_core/prompts/prompt_helpers.py +39 -0
  42. graphiti_core/prompts/snippets.py +29 -0
  43. graphiti_core/prompts/summarize_nodes.py +23 -25
  44. graphiti_core/search/search.py +158 -82
  45. graphiti_core/search/search_config.py +39 -4
  46. graphiti_core/search/search_filters.py +126 -35
  47. graphiti_core/search/search_helpers.py +5 -6
  48. graphiti_core/search/search_utils.py +1405 -485
  49. graphiti_core/telemetry/__init__.py +9 -0
  50. graphiti_core/telemetry/telemetry.py +117 -0
  51. graphiti_core/tracer.py +193 -0
  52. graphiti_core/utils/bulk_utils.py +364 -285
  53. graphiti_core/utils/datetime_utils.py +13 -0
  54. graphiti_core/utils/maintenance/community_operations.py +67 -49
  55. graphiti_core/utils/maintenance/dedup_helpers.py +262 -0
  56. graphiti_core/utils/maintenance/edge_operations.py +339 -197
  57. graphiti_core/utils/maintenance/graph_data_operations.py +50 -114
  58. graphiti_core/utils/maintenance/node_operations.py +319 -238
  59. graphiti_core/utils/maintenance/temporal_operations.py +11 -3
  60. graphiti_core/utils/ontology_utils/entity_types_utils.py +1 -1
  61. graphiti_core/utils/text_utils.py +53 -0
  62. graphiti_core-0.24.3.dist-info/METADATA +726 -0
  63. graphiti_core-0.24.3.dist-info/RECORD +86 -0
  64. {graphiti_core-0.12.0rc1.dist-info → graphiti_core-0.24.3.dist-info}/WHEEL +1 -1
  65. graphiti_core-0.12.0rc1.dist-info/METADATA +0 -350
  66. graphiti_core-0.12.0rc1.dist-info/RECORD +0 -66
  67. /graphiti_core/{utils/maintenance/utils.py → migrations/__init__.py} +0 -0
  68. {graphiti_core-0.12.0rc1.dist-info → graphiti_core-0.24.3.dist-info/licenses}/LICENSE +0 -0
@@ -0,0 +1,726 @@
1
+ Metadata-Version: 2.4
2
+ Name: graphiti-core
3
+ Version: 0.24.3
4
+ Summary: A temporal graph building library
5
+ Project-URL: Homepage, https://help.getzep.com/graphiti/graphiti/overview
6
+ Project-URL: Repository, https://github.com/getzep/graphiti
7
+ Author-email: Paul Paliychuk <paul@getzep.com>, Preston Rasmussen <preston@getzep.com>, Daniel Chalef <daniel@getzep.com>
8
+ License-Expression: Apache-2.0
9
+ License-File: LICENSE
10
+ Requires-Python: <4,>=3.10
11
+ Requires-Dist: diskcache>=5.6.3
12
+ Requires-Dist: neo4j>=5.26.0
13
+ Requires-Dist: numpy>=1.0.0
14
+ Requires-Dist: openai>=1.91.0
15
+ Requires-Dist: posthog>=3.0.0
16
+ Requires-Dist: pydantic>=2.11.5
17
+ Requires-Dist: python-dotenv>=1.0.1
18
+ Requires-Dist: tenacity>=9.0.0
19
+ Provides-Extra: anthropic
20
+ Requires-Dist: anthropic>=0.49.0; extra == 'anthropic'
21
+ Provides-Extra: dev
22
+ Requires-Dist: anthropic>=0.49.0; extra == 'dev'
23
+ Requires-Dist: boto3>=1.39.16; extra == 'dev'
24
+ Requires-Dist: diskcache-stubs>=5.6.3.6.20240818; extra == 'dev'
25
+ Requires-Dist: falkordb<2.0.0,>=1.1.2; extra == 'dev'
26
+ Requires-Dist: google-genai>=1.8.0; extra == 'dev'
27
+ Requires-Dist: groq>=0.2.0; extra == 'dev'
28
+ Requires-Dist: ipykernel>=6.29.5; extra == 'dev'
29
+ Requires-Dist: jupyterlab>=4.2.4; extra == 'dev'
30
+ Requires-Dist: kuzu>=0.11.3; extra == 'dev'
31
+ Requires-Dist: langchain-anthropic>=0.2.4; extra == 'dev'
32
+ Requires-Dist: langchain-aws>=0.2.29; extra == 'dev'
33
+ Requires-Dist: langchain-openai>=0.2.6; extra == 'dev'
34
+ Requires-Dist: langgraph>=0.2.15; extra == 'dev'
35
+ Requires-Dist: langsmith>=0.1.108; extra == 'dev'
36
+ Requires-Dist: opensearch-py>=3.0.0; extra == 'dev'
37
+ Requires-Dist: opentelemetry-sdk>=1.20.0; extra == 'dev'
38
+ Requires-Dist: pyright>=1.1.404; extra == 'dev'
39
+ Requires-Dist: pytest-asyncio>=0.24.0; extra == 'dev'
40
+ Requires-Dist: pytest-xdist>=3.6.1; extra == 'dev'
41
+ Requires-Dist: pytest>=8.3.3; extra == 'dev'
42
+ Requires-Dist: ruff>=0.7.1; extra == 'dev'
43
+ Requires-Dist: sentence-transformers>=3.2.1; extra == 'dev'
44
+ Requires-Dist: transformers>=4.45.2; extra == 'dev'
45
+ Requires-Dist: voyageai>=0.2.3; extra == 'dev'
46
+ Provides-Extra: falkordb
47
+ Requires-Dist: falkordb<2.0.0,>=1.1.2; extra == 'falkordb'
48
+ Provides-Extra: google-genai
49
+ Requires-Dist: google-genai>=1.8.0; extra == 'google-genai'
50
+ Provides-Extra: groq
51
+ Requires-Dist: groq>=0.2.0; extra == 'groq'
52
+ Provides-Extra: kuzu
53
+ Requires-Dist: kuzu>=0.11.3; extra == 'kuzu'
54
+ Provides-Extra: neo4j-opensearch
55
+ Requires-Dist: boto3>=1.39.16; extra == 'neo4j-opensearch'
56
+ Requires-Dist: opensearch-py>=3.0.0; extra == 'neo4j-opensearch'
57
+ Provides-Extra: neptune
58
+ Requires-Dist: boto3>=1.39.16; extra == 'neptune'
59
+ Requires-Dist: langchain-aws>=0.2.29; extra == 'neptune'
60
+ Requires-Dist: opensearch-py>=3.0.0; extra == 'neptune'
61
+ Provides-Extra: sentence-transformers
62
+ Requires-Dist: sentence-transformers>=3.2.1; extra == 'sentence-transformers'
63
+ Provides-Extra: tracing
64
+ Requires-Dist: opentelemetry-api>=1.20.0; extra == 'tracing'
65
+ Requires-Dist: opentelemetry-sdk>=1.20.0; extra == 'tracing'
66
+ Provides-Extra: voyageai
67
+ Requires-Dist: voyageai>=0.2.3; extra == 'voyageai'
68
+ Description-Content-Type: text/markdown
69
+
70
+ <p align="center">
71
+ <a href="https://www.getzep.com/">
72
+ <img src="https://github.com/user-attachments/assets/119c5682-9654-4257-8922-56b7cb8ffd73" width="150" alt="Zep Logo">
73
+ </a>
74
+ </p>
75
+
76
+ <h1 align="center">
77
+ Graphiti
78
+ </h1>
79
+ <h2 align="center"> Build Real-Time Knowledge Graphs for AI Agents</h2>
80
+ <div align="center">
81
+
82
+ [![Lint](https://github.com/getzep/Graphiti/actions/workflows/lint.yml/badge.svg?style=flat)](https://github.com/getzep/Graphiti/actions/workflows/lint.yml)
83
+ [![Unit Tests](https://github.com/getzep/Graphiti/actions/workflows/unit_tests.yml/badge.svg)](https://github.com/getzep/Graphiti/actions/workflows/unit_tests.yml)
84
+ [![MyPy Check](https://github.com/getzep/Graphiti/actions/workflows/typecheck.yml/badge.svg)](https://github.com/getzep/Graphiti/actions/workflows/typecheck.yml)
85
+
86
+ ![GitHub Repo stars](https://img.shields.io/github/stars/getzep/graphiti)
87
+ [![Discord](https://img.shields.io/badge/Discord-%235865F2.svg?&logo=discord&logoColor=white)](https://discord.com/invite/W8Kw6bsgXQ)
88
+ [![arXiv](https://img.shields.io/badge/arXiv-2501.13956-b31b1b.svg?style=flat)](https://arxiv.org/abs/2501.13956)
89
+ [![Release](https://img.shields.io/github/v/release/getzep/graphiti?style=flat&label=Release&color=limegreen)](https://github.com/getzep/graphiti/releases)
90
+
91
+ </div>
92
+ <div align="center">
93
+
94
+ <a href="https://trendshift.io/repositories/12986" target="_blank"><img src="https://trendshift.io/api/badge/repositories/12986" alt="getzep%2Fgraphiti | Trendshift" style="width: 250px; height: 55px;" width="250" height="55"/></a>
95
+
96
+ </div>
97
+
98
+ :star: _Help us reach more developers and grow the Graphiti community. Star this repo!_
99
+
100
+ <br />
101
+
102
+ > [!TIP]
103
+ > Check out the new [MCP server for Graphiti](mcp_server/README.md)! Give Claude, Cursor, and other MCP clients powerful
104
+ > Knowledge Graph-based memory.
105
+
106
+ Graphiti is a framework for building and querying temporally-aware knowledge graphs, specifically tailored for AI agents
107
+ operating in dynamic environments. Unlike traditional retrieval-augmented generation (RAG) methods, Graphiti
108
+ continuously integrates user interactions, structured and unstructured enterprise data, and external information into a
109
+ coherent, queryable graph. The framework supports incremental data updates, efficient retrieval, and precise historical
110
+ queries without requiring complete graph recomputation, making it suitable for developing interactive, context-aware AI
111
+ applications.
112
+
113
+ Use Graphiti to:
114
+
115
+ - Integrate and maintain dynamic user interactions and business data.
116
+ - Facilitate state-based reasoning and task automation for agents.
117
+ - Query complex, evolving data with semantic, keyword, and graph-based search methods.
118
+
119
+ <br />
120
+
121
+ <p align="center">
122
+ <img src="images/graphiti-graph-intro.gif" alt="Graphiti temporal walkthrough" width="700px">
123
+ </p>
124
+
125
+ <br />
126
+
127
+ A knowledge graph is a network of interconnected facts, such as _"Kendra loves Adidas shoes."_ Each fact is a "triplet"
128
+ represented by two entities, or
129
+ nodes ("Kendra", "Adidas shoes"), and their relationship, or edge ("loves"). Knowledge Graphs have been explored
130
+ extensively for information retrieval. What makes Graphiti unique is its ability to autonomously build a knowledge graph
131
+ while handling changing relationships and maintaining historical context.
132
+
133
+ ## Graphiti and Zep's Context Engineering Platform.
134
+
135
+ Graphiti powers the core of [Zep](https://www.getzep.com), a turn-key context engineering platform for AI Agents. Zep
136
+ offers agent memory, Graph RAG for dynamic data, and context retrieval and assembly.
137
+
138
+ Using Graphiti, we've demonstrated Zep is
139
+ the [State of the Art in Agent Memory](https://blog.getzep.com/state-of-the-art-agent-memory/).
140
+
141
+ Read our paper: [Zep: A Temporal Knowledge Graph Architecture for Agent Memory](https://arxiv.org/abs/2501.13956).
142
+
143
+ We're excited to open-source Graphiti, believing its potential reaches far beyond AI memory applications.
144
+
145
+ <p align="center">
146
+ <a href="https://arxiv.org/abs/2501.13956"><img src="images/arxiv-screenshot.png" alt="Zep: A Temporal Knowledge Graph Architecture for Agent Memory" width="700px"></a>
147
+ </p>
148
+
149
+ ## Zep vs Graphiti
150
+
151
+ | Aspect | Zep | Graphiti |
152
+ |--------|-----|----------|
153
+ | **What they are** | Fully managed platform for context engineering and AI memory | Open-source graph framework |
154
+ | **User & conversation management** | Built-in users, threads, and message storage | Build your own |
155
+ | **Retrieval & performance** | Pre-configured, production-ready retrieval with sub-200ms performance at scale | Custom implementation required; performance depends on your setup |
156
+ | **Developer tools** | Dashboard with graph visualization, debug logs, API logs; SDKs for Python, TypeScript, and Go | Build your own tools |
157
+ | **Enterprise features** | SLAs, support, security guarantees | Self-managed |
158
+ | **Deployment** | Fully managed or in your cloud | Self-hosted only |
159
+
160
+ ### When to choose which
161
+
162
+ **Choose Zep** if you want a turnkey, enterprise-grade platform with security, performance, and support baked in.
163
+
164
+ **Choose Graphiti** if you want a flexible OSS core and you're comfortable building/operating the surrounding system.
165
+
166
+ ## Why Graphiti?
167
+
168
+ Traditional RAG approaches often rely on batch processing and static data summarization, making them inefficient for
169
+ frequently changing data. Graphiti addresses these challenges by providing:
170
+
171
+ - **Real-Time Incremental Updates:** Immediate integration of new data episodes without batch recomputation.
172
+ - **Bi-Temporal Data Model:** Explicit tracking of event occurrence and ingestion times, allowing accurate point-in-time
173
+ queries.
174
+ - **Efficient Hybrid Retrieval:** Combines semantic embeddings, keyword (BM25), and graph traversal to achieve
175
+ low-latency queries without reliance on LLM summarization.
176
+ - **Custom Entity Definitions:** Flexible ontology creation and support for developer-defined entities through
177
+ straightforward Pydantic models.
178
+ - **Scalability:** Efficiently manages large datasets with parallel processing, suitable for enterprise environments.
179
+
180
+ <p align="center">
181
+ <img src="/images/graphiti-intro-slides-stock-2.gif" alt="Graphiti structured + unstructured demo" width="700px">
182
+ </p>
183
+
184
+ ## Graphiti vs. GraphRAG
185
+
186
+ | Aspect | GraphRAG | Graphiti |
187
+ |----------------------------|---------------------------------------|--------------------------------------------------|
188
+ | **Primary Use** | Static document summarization | Dynamic data management |
189
+ | **Data Handling** | Batch-oriented processing | Continuous, incremental updates |
190
+ | **Knowledge Structure** | Entity clusters & community summaries | Episodic data, semantic entities, communities |
191
+ | **Retrieval Method** | Sequential LLM summarization | Hybrid semantic, keyword, and graph-based search |
192
+ | **Adaptability** | Low | High |
193
+ | **Temporal Handling** | Basic timestamp tracking | Explicit bi-temporal tracking |
194
+ | **Contradiction Handling** | LLM-driven summarization judgments | Temporal edge invalidation |
195
+ | **Query Latency** | Seconds to tens of seconds | Typically sub-second latency |
196
+ | **Custom Entity Types** | No | Yes, customizable |
197
+ | **Scalability** | Moderate | High, optimized for large datasets |
198
+
199
+ Graphiti is specifically designed to address the challenges of dynamic and frequently updated datasets, making it
200
+ particularly suitable for applications requiring real-time interaction and precise historical queries.
201
+
202
+ ## Installation
203
+
204
+ Requirements:
205
+
206
+ - Python 3.10 or higher
207
+ - Neo4j 5.26 / FalkorDB 1.1.2 / Kuzu 0.11.3 / Amazon Neptune Database Cluster or Neptune Analytics Graph + Amazon
208
+ OpenSearch Serverless collection (serves as the full text search backend)
209
+ - OpenAI API key (Graphiti defaults to OpenAI for LLM inference and embedding)
210
+
211
+ > [!IMPORTANT]
212
+ > Graphiti works best with LLM services that support Structured Output (such as OpenAI and Gemini).
213
+ > Using other services may result in incorrect output schemas and ingestion failures. This is particularly
214
+ > problematic when using smaller models.
215
+
216
+ Optional:
217
+
218
+ - Google Gemini, Anthropic, or Groq API key (for alternative LLM providers)
219
+
220
+ > [!TIP]
221
+ > The simplest way to install Neo4j is via [Neo4j Desktop](https://neo4j.com/download/). It provides a user-friendly
222
+ > interface to manage Neo4j instances and databases.
223
+ > Alternatively, you can use FalkorDB on-premises via Docker and instantly start with the quickstart example:
224
+
225
+ ```bash
226
+ docker run -p 6379:6379 -p 3000:3000 -it --rm falkordb/falkordb:latest
227
+
228
+ ```
229
+
230
+ ```bash
231
+ pip install graphiti-core
232
+ ```
233
+
234
+ or
235
+
236
+ ```bash
237
+ uv add graphiti-core
238
+ ```
239
+
240
+ ### Installing with FalkorDB Support
241
+
242
+ If you plan to use FalkorDB as your graph database backend, install with the FalkorDB extra:
243
+
244
+ ```bash
245
+ pip install graphiti-core[falkordb]
246
+
247
+ # or with uv
248
+ uv add graphiti-core[falkordb]
249
+ ```
250
+
251
+ ### Installing with Kuzu Support
252
+
253
+ If you plan to use Kuzu as your graph database backend, install with the Kuzu extra:
254
+
255
+ ```bash
256
+ pip install graphiti-core[kuzu]
257
+
258
+ # or with uv
259
+ uv add graphiti-core[kuzu]
260
+ ```
261
+
262
+ ### Installing with Amazon Neptune Support
263
+
264
+ If you plan to use Amazon Neptune as your graph database backend, install with the Amazon Neptune extra:
265
+
266
+ ```bash
267
+ pip install graphiti-core[neptune]
268
+
269
+ # or with uv
270
+ uv add graphiti-core[neptune]
271
+ ```
272
+
273
+ ### You can also install optional LLM providers as extras:
274
+
275
+ ```bash
276
+ # Install with Anthropic support
277
+ pip install graphiti-core[anthropic]
278
+
279
+ # Install with Groq support
280
+ pip install graphiti-core[groq]
281
+
282
+ # Install with Google Gemini support
283
+ pip install graphiti-core[google-genai]
284
+
285
+ # Install with multiple providers
286
+ pip install graphiti-core[anthropic,groq,google-genai]
287
+
288
+ # Install with FalkorDB and LLM providers
289
+ pip install graphiti-core[falkordb,anthropic,google-genai]
290
+
291
+ # Install with Amazon Neptune
292
+ pip install graphiti-core[neptune]
293
+ ```
294
+
295
+ ## Default to Low Concurrency; LLM Provider 429 Rate Limit Errors
296
+
297
+ Graphiti's ingestion pipelines are designed for high concurrency. By default, concurrency is set low to avoid LLM
298
+ Provider 429 Rate Limit Errors. If you find Graphiti slow, please increase concurrency as described below.
299
+
300
+ Concurrency is controlled by the `SEMAPHORE_LIMIT` environment variable. By default, `SEMAPHORE_LIMIT` is set to `10`
301
+ concurrent operations to help prevent `429` rate limit errors from your LLM provider. If you encounter such errors, try
302
+ lowering this value.
303
+
304
+ If your LLM provider allows higher throughput, you can increase `SEMAPHORE_LIMIT` to boost episode ingestion
305
+ performance.
306
+
307
+ ## Quick Start
308
+
309
+ > [!IMPORTANT]
310
+ > Graphiti defaults to using OpenAI for LLM inference and embedding. Ensure that an `OPENAI_API_KEY` is set in your
311
+ > environment.
312
+ > Support for Anthropic and Groq LLM inferences is available, too. Other LLM providers may be supported via OpenAI
313
+ > compatible APIs.
314
+
315
+ For a complete working example, see the [Quickstart Example](./examples/quickstart/README.md) in the examples directory.
316
+ The quickstart demonstrates:
317
+
318
+ 1. Connecting to a Neo4j, Amazon Neptune, FalkorDB, or Kuzu database
319
+ 2. Initializing Graphiti indices and constraints
320
+ 3. Adding episodes to the graph (both text and structured JSON)
321
+ 4. Searching for relationships (edges) using hybrid search
322
+ 5. Reranking search results using graph distance
323
+ 6. Searching for nodes using predefined search recipes
324
+
325
+ The example is fully documented with clear explanations of each functionality and includes a comprehensive README with
326
+ setup instructions and next steps.
327
+
328
+ ### Running with Docker Compose
329
+
330
+ You can use Docker Compose to quickly start the required services:
331
+
332
+ - **Neo4j Docker:**
333
+ ```sh
334
+ docker compose up
335
+ ```
336
+ This will start the Neo4j Docker service and related components.
337
+
338
+ - **FalkorDB Docker:**
339
+ ```sh
340
+ docker compose --profile falkordb up
341
+ ```
342
+ This will start the FalkorDB Docker service and related components.
343
+
344
+ ## MCP Server
345
+
346
+ The `mcp_server` directory contains a Model Context Protocol (MCP) server implementation for Graphiti. This server
347
+ allows AI assistants to interact with Graphiti's knowledge graph capabilities through the MCP protocol.
348
+
349
+ Key features of the MCP server include:
350
+
351
+ - Episode management (add, retrieve, delete)
352
+ - Entity management and relationship handling
353
+ - Semantic and hybrid search capabilities
354
+ - Group management for organizing related data
355
+ - Graph maintenance operations
356
+
357
+ The MCP server can be deployed using Docker with Neo4j, making it easy to integrate Graphiti into your AI assistant
358
+ workflows.
359
+
360
+ For detailed setup instructions and usage examples, see the [MCP server README](./mcp_server/README.md).
361
+
362
+ ## REST Service
363
+
364
+ The `server` directory contains an API service for interacting with the Graphiti API. It is built using FastAPI.
365
+
366
+ Please see the [server README](./server/README.md) for more information.
367
+
368
+ ## Optional Environment Variables
369
+
370
+ In addition to the Neo4j and OpenAI-compatible credentials, Graphiti also has a few optional environment variables.
371
+ If you are using one of our supported models, such as Anthropic or Voyage models, the necessary environment variables
372
+ must be set.
373
+
374
+ ### Database Configuration
375
+
376
+ Database names are configured directly in the driver constructors:
377
+
378
+ - **Neo4j**: Database name defaults to `neo4j` (hardcoded in Neo4jDriver)
379
+ - **FalkorDB**: Database name defaults to `default_db` (hardcoded in FalkorDriver)
380
+
381
+ As of v0.17.0, if you need to customize your database configuration, you can instantiate a database driver and pass it
382
+ to the Graphiti constructor using the `graph_driver` parameter.
383
+
384
+ #### Neo4j with Custom Database Name
385
+
386
+ ```python
387
+ from graphiti_core import Graphiti
388
+ from graphiti_core.driver.neo4j_driver import Neo4jDriver
389
+
390
+ # Create a Neo4j driver with custom database name
391
+ driver = Neo4jDriver(
392
+ uri="bolt://localhost:7687",
393
+ user="neo4j",
394
+ password="password",
395
+ database="my_custom_database" # Custom database name
396
+ )
397
+
398
+ # Pass the driver to Graphiti
399
+ graphiti = Graphiti(graph_driver=driver)
400
+ ```
401
+
402
+ #### FalkorDB with Custom Database Name
403
+
404
+ ```python
405
+ from graphiti_core import Graphiti
406
+ from graphiti_core.driver.falkordb_driver import FalkorDriver
407
+
408
+ # Create a FalkorDB driver with custom database name
409
+ driver = FalkorDriver(
410
+ host="localhost",
411
+ port=6379,
412
+ username="falkor_user", # Optional
413
+ password="falkor_password", # Optional
414
+ database="my_custom_graph" # Custom database name
415
+ )
416
+
417
+ # Pass the driver to Graphiti
418
+ graphiti = Graphiti(graph_driver=driver)
419
+ ```
420
+
421
+ #### Kuzu
422
+
423
+ ```python
424
+ from graphiti_core import Graphiti
425
+ from graphiti_core.driver.kuzu_driver import KuzuDriver
426
+
427
+ # Create a Kuzu driver
428
+ driver = KuzuDriver(db="/tmp/graphiti.kuzu")
429
+
430
+ # Pass the driver to Graphiti
431
+ graphiti = Graphiti(graph_driver=driver)
432
+ ```
433
+
434
+ #### Amazon Neptune
435
+
436
+ ```python
437
+ from graphiti_core import Graphiti
438
+ from graphiti_core.driver.neptune_driver import NeptuneDriver
439
+
440
+ # Create an Amazon Neptune driver
441
+ driver = NeptuneDriver(
442
+     host="<NEPTUNE_ENDPOINT>",
+     aoss_host="<AMAZON_OPENSEARCH_SERVERLESS_HOST>",
+     port=8182,       # Optional, defaults to 8182
+     aoss_port=443,   # Optional, defaults to 443
450
+ )
451
+
452
+ driver = NeptuneDriver(host=neptune_uri, aoss_host=aoss_host, port=neptune_port)
453
+
454
+ # Pass the driver to Graphiti
455
+ graphiti = Graphiti(graph_driver=driver)
456
+ ```
457
+
458
+ ## Using Graphiti with Azure OpenAI
459
+
460
+ Graphiti supports Azure OpenAI for both LLM inference and embeddings using Azure's OpenAI v1 API compatibility layer.
461
+
462
+ ### Quick Start
463
+
464
+ ```python
465
+ from openai import AsyncOpenAI
466
+ from graphiti_core import Graphiti
467
+ from graphiti_core.llm_client.azure_openai_client import AzureOpenAILLMClient
468
+ from graphiti_core.llm_client.config import LLMConfig
469
+ from graphiti_core.embedder.azure_openai import AzureOpenAIEmbedderClient
470
+
471
+ # Initialize Azure OpenAI client using the standard OpenAI client
472
+ # with Azure's v1 API endpoint
473
+ azure_client = AsyncOpenAI(
474
+ base_url="https://your-resource-name.openai.azure.com/openai/v1/",
475
+ api_key="your-api-key",
476
+ )
477
+
478
+ # Create LLM and Embedder clients
479
+ llm_client = AzureOpenAILLMClient(
480
+ azure_client=azure_client,
481
+ config=LLMConfig(model="gpt-5-mini", small_model="gpt-5-mini") # Your Azure deployment name
482
+ )
483
+ embedder_client = AzureOpenAIEmbedderClient(
484
+ azure_client=azure_client,
485
+ model="text-embedding-3-small" # Your Azure embedding deployment name
486
+ )
487
+
488
+ # Initialize Graphiti with Azure OpenAI clients
489
+ graphiti = Graphiti(
490
+ "bolt://localhost:7687",
491
+ "neo4j",
492
+ "password",
493
+ llm_client=llm_client,
494
+ embedder=embedder_client,
495
+ )
496
+
497
+ # Now you can use Graphiti with Azure OpenAI
498
+ ```
499
+
500
+ **Key Points:**
501
+ - Use the standard `AsyncOpenAI` client with Azure's v1 API endpoint format: `https://your-resource-name.openai.azure.com/openai/v1/`
502
+ - The deployment names (e.g., `gpt-5-mini`, `text-embedding-3-small`) should match your Azure OpenAI deployment names
503
+ - See `examples/azure-openai/` for a complete working example
504
+
505
+ Make sure to replace the placeholder values with your actual Azure OpenAI credentials and deployment names.
506
+
507
+ ## Using Graphiti with Google Gemini
508
+
509
+ Graphiti supports Google's Gemini models for LLM inference, embeddings, and cross-encoding/reranking. To use Gemini,
510
+ you'll need to configure the LLM client, embedder, and the cross-encoder with your Google API key.
511
+
512
+ Install Graphiti:
513
+
514
+ ```bash
515
+ uv add "graphiti-core[google-genai]"
516
+
517
+ # or
518
+
519
+ pip install "graphiti-core[google-genai]"
520
+ ```
521
+
522
+ ```python
523
+ from graphiti_core import Graphiti
524
+ from graphiti_core.llm_client.gemini_client import GeminiClient, LLMConfig
525
+ from graphiti_core.embedder.gemini import GeminiEmbedder, GeminiEmbedderConfig
526
+ from graphiti_core.cross_encoder.gemini_reranker_client import GeminiRerankerClient
527
+
528
+ # Google API key configuration
529
+ api_key = "<your-google-api-key>"
530
+
531
+ # Initialize Graphiti with Gemini clients
532
+ graphiti = Graphiti(
533
+ "bolt://localhost:7687",
534
+ "neo4j",
535
+ "password",
536
+ llm_client=GeminiClient(
537
+ config=LLMConfig(
538
+ api_key=api_key,
539
+ model="gemini-2.0-flash"
540
+ )
541
+ ),
542
+ embedder=GeminiEmbedder(
543
+ config=GeminiEmbedderConfig(
544
+ api_key=api_key,
545
+ embedding_model="embedding-001"
546
+ )
547
+ ),
548
+ cross_encoder=GeminiRerankerClient(
549
+ config=LLMConfig(
550
+ api_key=api_key,
551
+ model="gemini-2.5-flash-lite"
552
+ )
553
+ )
554
+ )
555
+
556
+ # Now you can use Graphiti with Google Gemini for all components
557
+ ```
558
+
559
+ The Gemini reranker uses the `gemini-2.5-flash-lite` model by default, which is optimized for
560
+ cost-effective and low-latency classification tasks. It uses the same boolean classification approach as the OpenAI
561
+ reranker, leveraging Gemini's log probabilities feature to rank passage relevance.
562
+
563
+ ## Using Graphiti with Ollama (Local LLM)
564
+
565
+ Graphiti supports Ollama for running local LLMs and embedding models via Ollama's OpenAI-compatible API. This is ideal
566
+ for privacy-focused applications or when you want to avoid API costs.
567
+
568
+ **Note:** Use `OpenAIGenericClient` (not `OpenAIClient`) for Ollama and other OpenAI-compatible providers like LM Studio. The `OpenAIGenericClient` is optimized for local models with a higher default max token limit (16K vs 8K) and full support for structured outputs.
569
+
570
+ Install the models:
571
+
572
+ ```bash
573
+ ollama pull deepseek-r1:7b # LLM
574
+ ollama pull nomic-embed-text # embeddings
575
+ ```
576
+
577
+ ```python
578
+ from graphiti_core import Graphiti
579
+ from graphiti_core.llm_client.config import LLMConfig
580
+ from graphiti_core.llm_client.openai_generic_client import OpenAIGenericClient
581
+ from graphiti_core.embedder.openai import OpenAIEmbedder, OpenAIEmbedderConfig
582
+ from graphiti_core.cross_encoder.openai_reranker_client import OpenAIRerankerClient
583
+
584
+ # Configure Ollama LLM client
585
+ llm_config = LLMConfig(
586
+ api_key="ollama", # Ollama doesn't require a real API key, but some placeholder is needed
587
+ model="deepseek-r1:7b",
588
+ small_model="deepseek-r1:7b",
589
+ base_url="http://localhost:11434/v1", # Ollama's OpenAI-compatible endpoint
590
+ )
591
+
592
+ llm_client = OpenAIGenericClient(config=llm_config)
593
+
594
+ # Initialize Graphiti with Ollama clients
595
+ graphiti = Graphiti(
596
+ "bolt://localhost:7687",
597
+ "neo4j",
598
+ "password",
599
+ llm_client=llm_client,
600
+ embedder=OpenAIEmbedder(
601
+ config=OpenAIEmbedderConfig(
602
+ api_key="ollama", # Placeholder API key
603
+ embedding_model="nomic-embed-text",
604
+ embedding_dim=768,
605
+ base_url="http://localhost:11434/v1",
606
+ )
607
+ ),
608
+ cross_encoder=OpenAIRerankerClient(client=llm_client, config=llm_config),
609
+ )
610
+
611
+ # Now you can use Graphiti with local Ollama models
612
+ ```
613
+
614
+ Ensure Ollama is running (`ollama serve`) and that you have pulled the models you want to use.
615
+
616
+ ## Documentation
617
+
618
+ - [Guides and API documentation](https://help.getzep.com/graphiti).
619
+ - [Quick Start](https://help.getzep.com/graphiti/graphiti/quick-start)
620
+ - [Building an agent with LangChain's LangGraph and Graphiti](https://help.getzep.com/graphiti/integrations/lang-graph-agent)
621
+
622
+ ## Telemetry
623
+
624
+ Graphiti collects anonymous usage statistics to help us understand how the framework is being used and improve it for
625
+ everyone. We believe transparency is important, so here's exactly what we collect and why.
626
+
627
+ ### What We Collect
628
+
629
+ When you initialize a Graphiti instance, we collect:
630
+
631
+ - **Anonymous identifier**: A randomly generated UUID stored locally in `~/.cache/graphiti/telemetry_anon_id`
632
+ - **System information**: Operating system, Python version, and system architecture
633
+ - **Graphiti version**: The version you're using
634
+ - **Configuration choices**:
635
+ - LLM provider type (OpenAI, Azure, Anthropic, etc.)
636
+ - Database backend (Neo4j, FalkorDB, Kuzu, Amazon Neptune Database or Neptune Analytics)
637
+ - Embedder provider (OpenAI, Azure, Voyage, etc.)
638
+
639
+ ### What We Don't Collect
640
+
641
+ We are committed to protecting your privacy. We **never** collect:
642
+
643
+ - Personal information or identifiers
644
+ - API keys or credentials
645
+ - Your actual data, queries, or graph content
646
+ - IP addresses or hostnames
647
+ - File paths or system-specific information
648
+ - Any content from your episodes, nodes, or edges
649
+
650
+ ### Why We Collect This Data
651
+
652
+ This information helps us:
653
+
654
+ - Understand which configurations are most popular to prioritize support and testing
655
+ - Identify which LLM and database providers to focus development efforts on
656
+ - Track adoption patterns to guide our roadmap
657
+ - Ensure compatibility across different Python versions and operating systems
658
+
659
+ By sharing this anonymous information, you help us make Graphiti better for everyone in the community.
660
+
661
+ ### View the Telemetry Code
662
+
663
+ The Telemetry code [may be found here](graphiti_core/telemetry/telemetry.py).
664
+
665
+ ### How to Disable Telemetry
666
+
667
+ Telemetry is **opt-out** and can be disabled at any time. To disable telemetry collection:
668
+
669
+ **Option 1: Environment Variable**
670
+
671
+ ```bash
672
+ export GRAPHITI_TELEMETRY_ENABLED=false
673
+ ```
674
+
675
+ **Option 2: Set in your shell profile**
676
+
677
+ ```bash
678
+ # For bash users (~/.bashrc or ~/.bash_profile)
679
+ echo 'export GRAPHITI_TELEMETRY_ENABLED=false' >> ~/.bashrc
680
+
681
+ # For zsh users (~/.zshrc)
682
+ echo 'export GRAPHITI_TELEMETRY_ENABLED=false' >> ~/.zshrc
683
+ ```
684
+
685
+ **Option 3: Set for a specific Python session**
686
+
687
+ ```python
688
+ import os
689
+
690
+ os.environ['GRAPHITI_TELEMETRY_ENABLED'] = 'false'
691
+
692
+ # Then initialize Graphiti as usual
693
+ from graphiti_core import Graphiti
694
+
695
+ graphiti = Graphiti(...)
696
+ ```
697
+
698
+ Telemetry is automatically disabled during test runs (when `pytest` is detected).
699
+
700
+ ### Technical Details
701
+
702
+ - Telemetry uses PostHog for anonymous analytics collection
703
+ - All telemetry operations are designed to fail silently - they will never interrupt your application or affect Graphiti
704
+ functionality
705
+ - The anonymous ID is stored locally and is not tied to any personal information
706
+
707
+ ## Status and Roadmap
708
+
709
+ Graphiti is under active development. We aim to maintain API stability while working on:
710
+
711
+ - [x] Supporting custom graph schemas:
712
+ - Allow developers to provide their own defined node and edge classes when ingesting episodes
713
+ - Enable more flexible knowledge representation tailored to specific use cases
714
+ - [x] Enhancing retrieval capabilities with more robust and configurable options
715
+ - [x] Graphiti MCP Server
716
+ - [ ] Expanding test coverage to ensure reliability and catch edge cases
717
+
718
+ ## Contributing
719
+
720
+ We encourage and appreciate all forms of contributions, whether it's code, documentation, addressing GitHub Issues, or
721
+ answering questions in the Graphiti Discord channel. For detailed guidelines on code contributions, please refer
722
+ to [CONTRIBUTING](CONTRIBUTING.md).
723
+
724
+ ## Support
725
+
726
+ Join the [Zep Discord server](https://discord.com/invite/W8Kw6bsgXQ) and make your way to the **#Graphiti** channel!