graphiti-core 0.8.1.tar.gz → 0.8.3.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Files changed (61)
  1. {graphiti_core-0.8.1 → graphiti_core-0.8.3}/PKG-INFO +52 -32
  2. {graphiti_core-0.8.1 → graphiti_core-0.8.3}/README.md +50 -31
  3. {graphiti_core-0.8.1 → graphiti_core-0.8.3}/graphiti_core/llm_client/anthropic_client.py +5 -2
  4. {graphiti_core-0.8.1 → graphiti_core-0.8.3}/graphiti_core/llm_client/client.py +8 -3
  5. {graphiti_core-0.8.1 → graphiti_core-0.8.3}/graphiti_core/models/nodes/node_db_queries.py +2 -2
  6. {graphiti_core-0.8.1 → graphiti_core-0.8.3}/graphiti_core/prompts/summarize_nodes.py +4 -2
  7. {graphiti_core-0.8.1 → graphiti_core-0.8.3}/graphiti_core/utils/maintenance/node_operations.py +7 -2
  8. {graphiti_core-0.8.1 → graphiti_core-0.8.3}/pyproject.toml +7 -9
  9. {graphiti_core-0.8.1 → graphiti_core-0.8.3}/LICENSE +0 -0
  10. {graphiti_core-0.8.1 → graphiti_core-0.8.3}/graphiti_core/__init__.py +0 -0
  11. {graphiti_core-0.8.1 → graphiti_core-0.8.3}/graphiti_core/cross_encoder/__init__.py +0 -0
  12. {graphiti_core-0.8.1 → graphiti_core-0.8.3}/graphiti_core/cross_encoder/bge_reranker_client.py +0 -0
  13. {graphiti_core-0.8.1 → graphiti_core-0.8.3}/graphiti_core/cross_encoder/client.py +0 -0
  14. {graphiti_core-0.8.1 → graphiti_core-0.8.3}/graphiti_core/cross_encoder/openai_reranker_client.py +0 -0
  15. {graphiti_core-0.8.1 → graphiti_core-0.8.3}/graphiti_core/edges.py +0 -0
  16. {graphiti_core-0.8.1 → graphiti_core-0.8.3}/graphiti_core/embedder/__init__.py +0 -0
  17. {graphiti_core-0.8.1 → graphiti_core-0.8.3}/graphiti_core/embedder/client.py +0 -0
  18. {graphiti_core-0.8.1 → graphiti_core-0.8.3}/graphiti_core/embedder/openai.py +0 -0
  19. {graphiti_core-0.8.1 → graphiti_core-0.8.3}/graphiti_core/embedder/voyage.py +0 -0
  20. {graphiti_core-0.8.1 → graphiti_core-0.8.3}/graphiti_core/errors.py +0 -0
  21. {graphiti_core-0.8.1 → graphiti_core-0.8.3}/graphiti_core/graphiti.py +0 -0
  22. {graphiti_core-0.8.1 → graphiti_core-0.8.3}/graphiti_core/helpers.py +0 -0
  23. {graphiti_core-0.8.1 → graphiti_core-0.8.3}/graphiti_core/llm_client/__init__.py +0 -0
  24. {graphiti_core-0.8.1 → graphiti_core-0.8.3}/graphiti_core/llm_client/config.py +0 -0
  25. {graphiti_core-0.8.1 → graphiti_core-0.8.3}/graphiti_core/llm_client/errors.py +0 -0
  26. {graphiti_core-0.8.1 → graphiti_core-0.8.3}/graphiti_core/llm_client/groq_client.py +0 -0
  27. {graphiti_core-0.8.1 → graphiti_core-0.8.3}/graphiti_core/llm_client/openai_client.py +0 -0
  28. {graphiti_core-0.8.1 → graphiti_core-0.8.3}/graphiti_core/llm_client/openai_generic_client.py +0 -0
  29. {graphiti_core-0.8.1 → graphiti_core-0.8.3}/graphiti_core/llm_client/utils.py +0 -0
  30. {graphiti_core-0.8.1 → graphiti_core-0.8.3}/graphiti_core/models/__init__.py +0 -0
  31. {graphiti_core-0.8.1 → graphiti_core-0.8.3}/graphiti_core/models/edges/__init__.py +0 -0
  32. {graphiti_core-0.8.1 → graphiti_core-0.8.3}/graphiti_core/models/edges/edge_db_queries.py +0 -0
  33. {graphiti_core-0.8.1 → graphiti_core-0.8.3}/graphiti_core/models/nodes/__init__.py +0 -0
  34. {graphiti_core-0.8.1 → graphiti_core-0.8.3}/graphiti_core/nodes.py +0 -0
  35. {graphiti_core-0.8.1 → graphiti_core-0.8.3}/graphiti_core/prompts/__init__.py +0 -0
  36. {graphiti_core-0.8.1 → graphiti_core-0.8.3}/graphiti_core/prompts/dedupe_edges.py +0 -0
  37. {graphiti_core-0.8.1 → graphiti_core-0.8.3}/graphiti_core/prompts/dedupe_nodes.py +0 -0
  38. {graphiti_core-0.8.1 → graphiti_core-0.8.3}/graphiti_core/prompts/eval.py +0 -0
  39. {graphiti_core-0.8.1 → graphiti_core-0.8.3}/graphiti_core/prompts/extract_edge_dates.py +0 -0
  40. {graphiti_core-0.8.1 → graphiti_core-0.8.3}/graphiti_core/prompts/extract_edges.py +0 -0
  41. {graphiti_core-0.8.1 → graphiti_core-0.8.3}/graphiti_core/prompts/extract_nodes.py +0 -0
  42. {graphiti_core-0.8.1 → graphiti_core-0.8.3}/graphiti_core/prompts/invalidate_edges.py +0 -0
  43. {graphiti_core-0.8.1 → graphiti_core-0.8.3}/graphiti_core/prompts/lib.py +0 -0
  44. {graphiti_core-0.8.1 → graphiti_core-0.8.3}/graphiti_core/prompts/models.py +0 -0
  45. {graphiti_core-0.8.1 → graphiti_core-0.8.3}/graphiti_core/prompts/prompt_helpers.py +0 -0
  46. {graphiti_core-0.8.1 → graphiti_core-0.8.3}/graphiti_core/py.typed +0 -0
  47. {graphiti_core-0.8.1 → graphiti_core-0.8.3}/graphiti_core/search/__init__.py +0 -0
  48. {graphiti_core-0.8.1 → graphiti_core-0.8.3}/graphiti_core/search/search.py +0 -0
  49. {graphiti_core-0.8.1 → graphiti_core-0.8.3}/graphiti_core/search/search_config.py +0 -0
  50. {graphiti_core-0.8.1 → graphiti_core-0.8.3}/graphiti_core/search/search_config_recipes.py +0 -0
  51. {graphiti_core-0.8.1 → graphiti_core-0.8.3}/graphiti_core/search/search_filters.py +0 -0
  52. {graphiti_core-0.8.1 → graphiti_core-0.8.3}/graphiti_core/search/search_utils.py +0 -0
  53. {graphiti_core-0.8.1 → graphiti_core-0.8.3}/graphiti_core/utils/__init__.py +0 -0
  54. {graphiti_core-0.8.1 → graphiti_core-0.8.3}/graphiti_core/utils/bulk_utils.py +0 -0
  55. {graphiti_core-0.8.1 → graphiti_core-0.8.3}/graphiti_core/utils/datetime_utils.py +0 -0
  56. {graphiti_core-0.8.1 → graphiti_core-0.8.3}/graphiti_core/utils/maintenance/__init__.py +0 -0
  57. {graphiti_core-0.8.1 → graphiti_core-0.8.3}/graphiti_core/utils/maintenance/community_operations.py +0 -0
  58. {graphiti_core-0.8.1 → graphiti_core-0.8.3}/graphiti_core/utils/maintenance/edge_operations.py +0 -0
  59. {graphiti_core-0.8.1 → graphiti_core-0.8.3}/graphiti_core/utils/maintenance/graph_data_operations.py +0 -0
  60. {graphiti_core-0.8.1 → graphiti_core-0.8.3}/graphiti_core/utils/maintenance/temporal_operations.py +0 -0
  61. {graphiti_core-0.8.1 → graphiti_core-0.8.3}/graphiti_core/utils/maintenance/utils.py +0 -0
--- graphiti_core-0.8.1/PKG-INFO
+++ graphiti_core-0.8.3/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: graphiti-core
-Version: 0.8.1
+Version: 0.8.3
 Summary: A temporal graph building library
 License: Apache-2.0
 Author: Paul Paliychuk
@@ -11,6 +11,7 @@ Classifier: Programming Language :: Python :: 3
 Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
+Requires-Dist: anthropic (>=0.49.0,<0.50.0)
 Requires-Dist: diskcache (>=5.6.3,<6.0.0)
 Requires-Dist: neo4j (>=5.23.0,<6.0.0)
 Requires-Dist: numpy (>=1.0.0)
@@ -29,7 +30,7 @@ Description-Content-Type: text/markdown
 <h1 align="center">
 Graphiti
 </h1>
-<h2 align="center"> Temporal Knowledge Graphs for Agentic Applications</h2>
+<h2 align="center"> Build Real-Time Knowledge Graphs for AI Agents</h2>
 <br />
 
 [![Discord](https://dcbadge.vercel.app/api/server/W8Kw6bsgXQ?style=flat)](https://discord.com/invite/W8Kw6bsgXQ)
@@ -41,9 +42,13 @@ Graphiti
 :star: _Help us reach more developers and grow the Graphiti community. Star this repo!_
 <br />
 
-Graphiti builds dynamic, temporally aware Knowledge Graphs that represent complex, evolving relationships between
-entities over time. Graphiti ingests both unstructured and structured data, and the resulting graph may be queried using
-a fusion of time, full-text, semantic, and graph algorithm approaches, effectively serving as a powerful memory layer for AI applications.
+Graphiti is a framework for building and querying temporally-aware knowledge graphs, specifically tailored for AI agents operating in dynamic environments. Unlike traditional retrieval-augmented generation (RAG) methods, Graphiti continuously integrates user interactions, structured and unstructured enterprise data, and external information into a coherent, queryable graph. The framework supports incremental data updates, efficient retrieval, and precise historical queries without requiring complete graph recomputation, making it suitable for developing interactive, context-aware AI applications.
+
+Use Graphiti to:
+
+- Integrate and maintain dynamic user interactions and business data.
+- Facilitate state-based reasoning and task automation for agents.
+- Query complex, evolving data with semantic, keyword, and graph-based search methods.
 
 <br />
 
@@ -53,23 +58,14 @@ a fusion of time, full-text, semantic, and graph algorithm approaches, effective
 
 <br />
 
-Graphiti helps you create and query Knowledge Graphs that evolve over time. A knowledge graph is a network of
-interconnected facts, such as _“Kendra loves Adidas shoes.”_ Each fact is a “triplet” represented by two entities, or
+A knowledge graph is a network of interconnected facts, such as _“Kendra loves Adidas shoes.”_ Each fact is a “triplet” represented by two entities, or
 nodes (_”Kendra”_, _“Adidas shoes”_), and their relationship, or edge (_”loves”_). Knowledge Graphs have been explored
 extensively for information retrieval. What makes Graphiti unique is its ability to autonomously build a knowledge graph
 while handling changing relationships and maintaining historical context.
 
-With Graphiti, you can build LLM applications such as:
-
-- Assistants that learn from user interactions, fusing personal knowledge with dynamic data from business systems like
-CRMs and billing platforms through robust conversation history management.
-- Agents that autonomously execute complex tasks, reasoning with state changes from multiple dynamic sources through persistent memory.
-
-Graphiti supports a wide range of applications in sales, customer service, health, finance, and more, enabling long-term recall and state-based reasoning for both assistants and agents.
-
 ## Graphiti and Zep Memory
 
-Graphiti powers the core of [Zep's memory layer](https://www.getzep.com) for LLM-powered Assistants and Agents.
+Graphiti powers the core of [Zep's memory layer](https://www.getzep.com) for AI Agents.
 
 Using Graphiti, we've demonstrated Zep is
 the [State of the Art in Agent Memory](https://blog.getzep.com/state-of-the-art-agent-memory/).
@@ -84,28 +80,35 @@ We're excited to open-source Graphiti, believing its potential reaches far beyon
 
 ## Why Graphiti?
 
-We were intrigued by Microsoft's GraphRAG, which expanded on RAG (Retrieval-Augmented Generation) text chunking by using a graph to better model a
-document corpus and making this representation available via semantic and graph search techniques. However, GraphRAG did
-not address our core problem: It's primarily designed for static documents and doesn't inherently handle temporal
-aspects of data.
-
-Graphiti is designed from the ground up to handle constantly changing information, hybrid semantic and graph search, and
-scale:
+Traditional RAG approaches often rely on batch processing and static data summarization, making them inefficient for frequently changing data. Graphiti addresses these challenges by providing:
 
-- **Temporal Awareness:** Tracks changes in facts and relationships over time, enabling point-in-time queries. Graph
-edges include temporal metadata to record relationship lifecycles, creating a comprehensive context window extension.
-- **Episodic Processing:** Ingests data as discrete episodes, maintaining data provenance and allowing incremental
-entity and relationship extraction, ideal for chat state management.
-- **Hybrid Search:** Combines semantic and BM25 full-text search, with the ability to rerank results by distance from a
-central node e.g. "Kendra".
-- **Scalable:** Designed for processing large datasets, with parallelization of LLM calls for bulk processing while
-preserving the chronology of events and enabling efficient knowledge retrieval.
-- **Supports Varied Sources:** Can ingest both unstructured text and structured JSON data.
+- **Real-Time Incremental Updates:** Immediate integration of new data episodes without batch recomputation.
+- **Bi-Temporal Data Model:** Explicit tracking of event occurrence and ingestion times, allowing accurate point-in-time queries.
+- **Efficient Hybrid Retrieval:** Combines semantic embeddings, keyword (BM25), and graph traversal to achieve low-latency queries without reliance on LLM summarization.
+- **Custom Entity Definitions:** Flexible ontology creation and support for developer-defined entities through straightforward Pydantic models.
+- **Scalability:** Efficiently manages large datasets with parallel processing, suitable for enterprise environments.
 
 <p align="center">
 <img src="/images/graphiti-intro-slides-stock-2.gif" alt="Graphiti structured + unstructured demo" width="700px">
 </p>
 
+## Graphiti vs. GraphRAG
+
+| Aspect                     | GraphRAG                               | Graphiti                                          |
+| -------------------------- | -------------------------------------- | ------------------------------------------------- |
+| **Primary Use**            | Static document summarization          | Dynamic data management                           |
+| **Data Handling**          | Batch-oriented processing              | Continuous, incremental updates                   |
+| **Knowledge Structure**    | Entity clusters & community summaries  | Episodic data, semantic entities, communities     |
+| **Retrieval Method**       | Sequential LLM summarization           | Hybrid semantic, keyword, and graph-based search  |
+| **Adaptability**           | Low                                    | High                                              |
+| **Temporal Handling**      | Basic timestamp tracking               | Explicit bi-temporal tracking                     |
+| **Contradiction Handling** | LLM-driven summarization judgments     | Temporal edge invalidation                        |
+| **Query Latency**          | Seconds to tens of seconds             | Typically sub-second latency                      |
+| **Custom Entity Types**    | No                                     | Yes, customizable                                 |
+| **Scalability**            | Moderate                               | High, optimized for large datasets                |
+
+Graphiti is specifically designed to address the challenges of dynamic and frequently updated datasets, making it particularly suitable for applications requiring real-time interaction and precise historical queries.
+
 ## Installation
 
 Requirements:
@@ -207,6 +210,22 @@ The `server` directory contains an API service for interacting with the Graphiti
 
 Please see the [server README](./server/README.md) for more information.
 
+## MCP Server
+
+The `mcp_server` directory contains a Model Context Protocol (MCP) server implementation for Graphiti. This server allows AI assistants to interact with Graphiti's knowledge graph capabilities through the MCP protocol.
+
+Key features of the MCP server include:
+
+- Episode management (add, retrieve, delete)
+- Entity management and relationship handling
+- Semantic and hybrid search capabilities
+- Group management for organizing related data
+- Graph maintenance operations
+
+The MCP server can be deployed using Docker with Neo4j, making it easy to integrate Graphiti into your AI assistant workflows.
+
+For detailed setup instructions and usage examples, see the [MCP server README](./mcp_server/README.md).
+
 ## Optional Environment Variables
 
 In addition to the Neo4j and OpenAi-compatible credentials, Graphiti also has a few optional environment variables.
@@ -280,6 +299,7 @@ Graphiti is under active development. We aim to maintain API stability while wor
 - Allow developers to provide their own defined node and edge classes when ingesting episodes
 - Enable more flexible knowledge representation tailored to specific use cases
 - [x] Enhancing retrieval capabilities with more robust and configurable options
+- [x] Graphiti MCP Server
 - [ ] Expanding test coverage to ensure reliability and catch edge cases
 
 ## Contributing
--- graphiti_core-0.8.1/README.md
+++ graphiti_core-0.8.3/README.md
@@ -7,7 +7,7 @@
 <h1 align="center">
 Graphiti
 </h1>
-<h2 align="center"> Temporal Knowledge Graphs for Agentic Applications</h2>
+<h2 align="center"> Build Real-Time Knowledge Graphs for AI Agents</h2>
 <br />
 
 [![Discord](https://dcbadge.vercel.app/api/server/W8Kw6bsgXQ?style=flat)](https://discord.com/invite/W8Kw6bsgXQ)
@@ -19,9 +19,13 @@ Graphiti
 :star: _Help us reach more developers and grow the Graphiti community. Star this repo!_
 <br />
 
-Graphiti builds dynamic, temporally aware Knowledge Graphs that represent complex, evolving relationships between
-entities over time. Graphiti ingests both unstructured and structured data, and the resulting graph may be queried using
-a fusion of time, full-text, semantic, and graph algorithm approaches, effectively serving as a powerful memory layer for AI applications.
+Graphiti is a framework for building and querying temporally-aware knowledge graphs, specifically tailored for AI agents operating in dynamic environments. Unlike traditional retrieval-augmented generation (RAG) methods, Graphiti continuously integrates user interactions, structured and unstructured enterprise data, and external information into a coherent, queryable graph. The framework supports incremental data updates, efficient retrieval, and precise historical queries without requiring complete graph recomputation, making it suitable for developing interactive, context-aware AI applications.
+
+Use Graphiti to:
+
+- Integrate and maintain dynamic user interactions and business data.
+- Facilitate state-based reasoning and task automation for agents.
+- Query complex, evolving data with semantic, keyword, and graph-based search methods.
 
 <br />
 
@@ -31,23 +35,14 @@ a fusion of time, full-text, semantic, and graph algorithm approaches, effective
 
 <br />
 
-Graphiti helps you create and query Knowledge Graphs that evolve over time. A knowledge graph is a network of
-interconnected facts, such as _“Kendra loves Adidas shoes.”_ Each fact is a “triplet” represented by two entities, or
+A knowledge graph is a network of interconnected facts, such as _“Kendra loves Adidas shoes.”_ Each fact is a “triplet” represented by two entities, or
 nodes (_”Kendra”_, _“Adidas shoes”_), and their relationship, or edge (_”loves”_). Knowledge Graphs have been explored
 extensively for information retrieval. What makes Graphiti unique is its ability to autonomously build a knowledge graph
 while handling changing relationships and maintaining historical context.
 
-With Graphiti, you can build LLM applications such as:
-
-- Assistants that learn from user interactions, fusing personal knowledge with dynamic data from business systems like
-CRMs and billing platforms through robust conversation history management.
-- Agents that autonomously execute complex tasks, reasoning with state changes from multiple dynamic sources through persistent memory.
-
-Graphiti supports a wide range of applications in sales, customer service, health, finance, and more, enabling long-term recall and state-based reasoning for both assistants and agents.
-
 ## Graphiti and Zep Memory
 
-Graphiti powers the core of [Zep's memory layer](https://www.getzep.com) for LLM-powered Assistants and Agents.
+Graphiti powers the core of [Zep's memory layer](https://www.getzep.com) for AI Agents.
 
 Using Graphiti, we've demonstrated Zep is
 the [State of the Art in Agent Memory](https://blog.getzep.com/state-of-the-art-agent-memory/).
@@ -62,28 +57,35 @@ We're excited to open-source Graphiti, believing its potential reaches far beyon
 
 ## Why Graphiti?
 
-We were intrigued by Microsoft's GraphRAG, which expanded on RAG (Retrieval-Augmented Generation) text chunking by using a graph to better model a
-document corpus and making this representation available via semantic and graph search techniques. However, GraphRAG did
-not address our core problem: It's primarily designed for static documents and doesn't inherently handle temporal
-aspects of data.
-
-Graphiti is designed from the ground up to handle constantly changing information, hybrid semantic and graph search, and
-scale:
+Traditional RAG approaches often rely on batch processing and static data summarization, making them inefficient for frequently changing data. Graphiti addresses these challenges by providing:
 
-- **Temporal Awareness:** Tracks changes in facts and relationships over time, enabling point-in-time queries. Graph
-edges include temporal metadata to record relationship lifecycles, creating a comprehensive context window extension.
-- **Episodic Processing:** Ingests data as discrete episodes, maintaining data provenance and allowing incremental
-entity and relationship extraction, ideal for chat state management.
-- **Hybrid Search:** Combines semantic and BM25 full-text search, with the ability to rerank results by distance from a
-central node e.g. "Kendra".
-- **Scalable:** Designed for processing large datasets, with parallelization of LLM calls for bulk processing while
-preserving the chronology of events and enabling efficient knowledge retrieval.
-- **Supports Varied Sources:** Can ingest both unstructured text and structured JSON data.
+- **Real-Time Incremental Updates:** Immediate integration of new data episodes without batch recomputation.
+- **Bi-Temporal Data Model:** Explicit tracking of event occurrence and ingestion times, allowing accurate point-in-time queries.
+- **Efficient Hybrid Retrieval:** Combines semantic embeddings, keyword (BM25), and graph traversal to achieve low-latency queries without reliance on LLM summarization.
+- **Custom Entity Definitions:** Flexible ontology creation and support for developer-defined entities through straightforward Pydantic models.
+- **Scalability:** Efficiently manages large datasets with parallel processing, suitable for enterprise environments.
 
 <p align="center">
 <img src="/images/graphiti-intro-slides-stock-2.gif" alt="Graphiti structured + unstructured demo" width="700px">
 </p>
 
+## Graphiti vs. GraphRAG
+
+| Aspect                     | GraphRAG                               | Graphiti                                          |
+| -------------------------- | -------------------------------------- | ------------------------------------------------- |
+| **Primary Use**            | Static document summarization          | Dynamic data management                           |
+| **Data Handling**          | Batch-oriented processing              | Continuous, incremental updates                   |
+| **Knowledge Structure**    | Entity clusters & community summaries  | Episodic data, semantic entities, communities     |
+| **Retrieval Method**       | Sequential LLM summarization           | Hybrid semantic, keyword, and graph-based search  |
+| **Adaptability**           | Low                                    | High                                              |
+| **Temporal Handling**      | Basic timestamp tracking               | Explicit bi-temporal tracking                     |
+| **Contradiction Handling** | LLM-driven summarization judgments     | Temporal edge invalidation                        |
+| **Query Latency**          | Seconds to tens of seconds             | Typically sub-second latency                      |
+| **Custom Entity Types**    | No                                     | Yes, customizable                                 |
+| **Scalability**            | Moderate                               | High, optimized for large datasets                |
+
+Graphiti is specifically designed to address the challenges of dynamic and frequently updated datasets, making it particularly suitable for applications requiring real-time interaction and precise historical queries.
+
 ## Installation
 
 Requirements:
@@ -185,6 +187,22 @@ The `server` directory contains an API service for interacting with the Graphiti
 
 Please see the [server README](./server/README.md) for more information.
 
+## MCP Server
+
+The `mcp_server` directory contains a Model Context Protocol (MCP) server implementation for Graphiti. This server allows AI assistants to interact with Graphiti's knowledge graph capabilities through the MCP protocol.
+
+Key features of the MCP server include:
+
+- Episode management (add, retrieve, delete)
+- Entity management and relationship handling
+- Semantic and hybrid search capabilities
+- Group management for organizing related data
+- Graph maintenance operations
+
+The MCP server can be deployed using Docker with Neo4j, making it easy to integrate Graphiti into your AI assistant workflows.
+
+For detailed setup instructions and usage examples, see the [MCP server README](./mcp_server/README.md).
+
 ## Optional Environment Variables
 
 In addition to the Neo4j and OpenAi-compatible credentials, Graphiti also has a few optional environment variables.
@@ -258,6 +276,7 @@ Graphiti is under active development. We aim to maintain API stability while wor
 - Allow developers to provide their own defined node and edge classes when ingesting episodes
 - Enable more flexible knowledge representation tailored to specific use cases
 - [x] Enhancing retrieval capabilities with more robust and configurable options
+- [x] Graphiti MCP Server
 - [ ] Expanding test coverage to ensure reliability and catch edge cases
 
 ## Contributing
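The description changes above (identical in PKG-INFO and README.md) add a **Custom Entity Definitions** bullet about developer-defined entities built on Pydantic models. As a rough, hypothetical illustration only (the `Customer` type and its fields are invented for this example, and the API for registering such types is not part of this diff):

```python
from pydantic import BaseModel, Field


class Customer(BaseModel):
    """Hypothetical custom entity type for a Graphiti knowledge graph."""

    name: str = Field(..., description='Name of the customer')
    favorite_brand: str | None = Field(None, description='Preferred brand, if mentioned in the data')
```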
--- graphiti_core-0.8.1/graphiti_core/llm_client/anthropic_client.py
+++ graphiti_core-0.8.3/graphiti_core/llm_client/anthropic_client.py
@@ -29,7 +29,7 @@ from .errors import RateLimitError
 
 logger = logging.getLogger(__name__)
 
-DEFAULT_MODEL = 'claude-3-5-sonnet-20240620'
+DEFAULT_MODEL = 'claude-3-7-sonnet-latest'
 DEFAULT_MAX_TOKENS = 8192
 
 
@@ -58,11 +58,14 @@ class AnthropicClient(LLMClient):
             {'role': 'assistant', 'content': '{'}
         ]
 
+        # Ensure max_tokens is not greater than config.max_tokens or DEFAULT_MAX_TOKENS
+        max_tokens = min(max_tokens, self.config.max_tokens, DEFAULT_MAX_TOKENS)
+
         try:
             result = await self.client.messages.create(
                 system='Only include JSON in the response. Do not include any additional text or explanation of the content.\n'
                 + system_message.content,
-                max_tokens=max_tokens or self.max_tokens,
+                max_tokens=max_tokens,
                 temperature=self.temperature,
                 messages=user_messages,  # type: ignore
                 model=self.model or DEFAULT_MODEL,
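Beyond swapping the default model, the functional change in `anthropic_client.py` is that the requested token budget is now clamped rather than passed through. A minimal, self-contained sketch of that clamping behaviour (the `clamp_max_tokens` helper is illustrative only, not library code):

```python
DEFAULT_MAX_TOKENS = 8192


def clamp_max_tokens(requested: int, configured: int) -> int:
    # Mirrors the min(...) added in 0.8.3: the effective value can never
    # exceed the configured limit or the hard default.
    return min(requested, configured, DEFAULT_MAX_TOKENS)


assert clamp_max_tokens(16_384, 8_192) == 8_192  # oversized requests are capped
assert clamp_max_tokens(1_024, 8_192) == 1_024   # smaller requests pass through unchanged
```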
--- graphiti_core-0.8.1/graphiti_core/llm_client/client.py
+++ graphiti_core-0.8.3/graphiti_core/llm_client/client.py
@@ -54,7 +54,11 @@ class LLMClient(ABC):
         self.temperature = config.temperature
         self.max_tokens = config.max_tokens
         self.cache_enabled = cache
-        self.cache_dir = Cache(DEFAULT_CACHE_DIR)  # Create a cache directory
+        self.cache_dir = None
+
+        # Only create the cache directory if caching is enabled
+        if self.cache_enabled:
+            self.cache_dir = Cache(DEFAULT_CACHE_DIR)
 
     def _clean_input(self, input: str) -> str:
         """Clean input string of invalid unicode and control characters.
@@ -129,7 +133,7 @@
             f'\n\nRespond with a JSON object in the following format:\n\n{serialized_model}'
         )
 
-        if self.cache_enabled:
+        if self.cache_enabled and self.cache_dir is not None:
             cache_key = self._get_cache_key(messages)
 
             cached_response = self.cache_dir.get(cache_key)
@@ -142,7 +146,8 @@
 
         response = await self._generate_response_with_retry(messages, response_model, max_tokens)
 
-        if self.cache_enabled:
+        if self.cache_enabled and self.cache_dir is not None:
+            cache_key = self._get_cache_key(messages)
             self.cache_dir.set(cache_key, response)
 
         return response
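The `client.py` change makes the disk cache lazy: the `diskcache` directory is created only when caching is requested, and every cache access is guarded against a missing cache. A condensed sketch of the pattern (the class name and cache path below are placeholders, not the library's API):

```python
from diskcache import Cache


class CachingClientSketch:
    def __init__(self, cache: bool = False, cache_path: str = '/tmp/example_llm_cache'):
        self.cache_enabled = cache
        self.cache_dir = None
        if self.cache_enabled:
            # The directory is only created when caching is actually enabled.
            self.cache_dir = Cache(cache_path)

    def lookup(self, key: str):
        # Guarding on both conditions avoids touching a cache that was never created.
        if self.cache_enabled and self.cache_dir is not None:
            return self.cache_dir.get(key)
        return None
```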
--- graphiti_core-0.8.1/graphiti_core/models/nodes/node_db_queries.py
+++ graphiti_core-0.8.3/graphiti_core/models/nodes/node_db_queries.py
@@ -30,10 +30,10 @@ EPISODIC_NODE_SAVE_BULK = """
 """
 
 ENTITY_NODE_SAVE = """
-MERGE (n:Entity {uuid: $uuid})
+MERGE (n:Entity {uuid: $entity_data.uuid})
 SET n:$($labels)
 SET n = $entity_data
-WITH n CALL db.create.setNodeVectorProperty(n, "name_embedding", $name_embedding)
+WITH n CALL db.create.setNodeVectorProperty(n, "name_embedding", $entity_data.name_embedding)
 RETURN n.uuid AS uuid"""
 
 ENTITY_NODE_SAVE_BULK = """
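The updated `ENTITY_NODE_SAVE` query reads `uuid` and `name_embedding` from the `$entity_data` map rather than from separate parameters, so a caller only binds `entity_data` and `labels`. A hedged sketch of running it with the Neo4j Python driver (the connection details and entity values are placeholders; the dynamic-label `SET n:$($labels)` syntax comes straight from the query above and needs a Neo4j release that supports it):

```python
from neo4j import AsyncGraphDatabase

ENTITY_NODE_SAVE = """
MERGE (n:Entity {uuid: $entity_data.uuid})
SET n:$($labels)
SET n = $entity_data
WITH n CALL db.create.setNodeVectorProperty(n, "name_embedding", $entity_data.name_embedding)
RETURN n.uuid AS uuid"""


async def save_entity_node() -> None:
    # Placeholder connection details; point these at a real Neo4j instance.
    driver = AsyncGraphDatabase.driver('bolt://localhost:7687', auth=('neo4j', 'password'))
    async with driver.session() as session:
        await session.run(
            ENTITY_NODE_SAVE,
            entity_data={
                'uuid': 'node-123',                 # placeholder UUID
                'name': 'Kendra',
                'name_embedding': [0.1, 0.2, 0.3],  # placeholder embedding vector
            },
            labels=['Entity'],
        )
    await driver.close()

# Run with: asyncio.run(save_entity_node()) against a reachable Neo4j instance.
```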
--- graphiti_core-0.8.1/graphiti_core/prompts/summarize_nodes.py
+++ graphiti_core-0.8.3/graphiti_core/prompts/summarize_nodes.py
@@ -85,8 +85,10 @@ def summarize_context(context: dict[str, Any]) -> list[Message]:
 provided ENTITY. Summaries must be under 500 words.
 
 In addition, extract any values for the provided entity properties based on their descriptions.
-If the value of the entity property cannot be found in the current context, set the value of the property to None.
-Do not hallucinate entity property values if they cannot be found in the current context.
+If the value of the entity property cannot be found in the current context, set the value of the property to the Python value None.
+
+Guidelines:
+1. Do not hallucinate entity property values if they cannot be found in the current context.
 
 <ENTITY>
 {context['node_name']}
--- graphiti_core-0.8.1/graphiti_core/utils/maintenance/node_operations.py
+++ graphiti_core-0.8.3/graphiti_core/utils/maintenance/node_operations.py
@@ -364,7 +364,11 @@ async def resolve_extracted_node(
         )
 
         extracted_node.summary = node_attributes_response.get('summary', '')
-        extracted_node.attributes.update(node_attributes_response)
+        node_attributes = {
+            key: value if value != 'None' else None for key, value in node_attributes_response.items()
+        }
+
+        extracted_node.attributes.update(node_attributes)
 
         is_duplicate: bool = llm_response.get('is_duplicate', False)
         uuid: str | None = llm_response.get('uuid', None)
@@ -386,11 +390,12 @@
         node.name = name
         node.summary = summary_response.get('summary', '')
 
-        new_attributes = existing_node.attributes
+        new_attributes = extracted_node.attributes
         existing_attributes = existing_node.attributes
         for attribute_name, attribute_value in existing_attributes.items():
             if new_attributes.get(attribute_name) is None:
                 new_attributes[attribute_name] = attribute_value
+        node.attributes = new_attributes
 
         uuid_map[extracted_node.uuid] = existing_node.uuid
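Two behavioural fixes show up in `node_operations.py`: attribute values returned by the LLM as the literal string `'None'` are normalised to Python `None` before being applied, and the attribute merge now starts from the newly extracted attributes (in 0.8.1, `new_attributes` pointed at `existing_node.attributes`, so the loop had no effect and the merged result was never written back; 0.8.3 starts from `extracted_node.attributes` and assigns the merged dict to `node.attributes`). A small, self-contained illustration with made-up attribute data:

```python
# 1. Normalise the literal string 'None' coming back from the LLM.
node_attributes_response = {'summary': 'Kendra likes running shoes', 'age': 'None'}
node_attributes = {
    key: value if value != 'None' else None
    for key, value in node_attributes_response.items()
}
assert node_attributes['age'] is None

# 2. Merge: newly extracted values win; existing values only fill the gaps.
extracted = {'favorite_brand': 'Adidas', 'age': None}
existing = {'favorite_brand': 'Nike', 'age': 30}
merged = dict(extracted)
for attribute_name, attribute_value in existing.items():
    if merged.get(attribute_name) is None:
        merged[attribute_name] = attribute_value
assert merged == {'favorite_brand': 'Adidas', 'age': 30}
```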
 
--- graphiti_core-0.8.1/pyproject.toml
+++ graphiti_core-0.8.3/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "graphiti-core"
-version = "0.8.1"
+version = "0.8.3"
 description = "A temporal graph building library"
 authors = [
     "Paul Paliychuk <paul@getzep.com>",
@@ -21,18 +21,12 @@ openai = "^1.53.0"
 tenacity = "9.0.0"
 numpy = ">=1.0.0"
 python-dotenv = "^1.0.1"
-
-[tool.poetry.dev-dependencies]
-pytest = "^8.3.3"
-pytest-asyncio = "^0.24.0"
-pytest-xdist = "^3.6.1"
-ruff = "^0.7.1"
+anthropic = "~0.49.0"
 
 [tool.poetry.group.dev.dependencies]
-pydantic = "^2.8.2"
 mypy = "^1.11.1"
 groq = ">=0.9,<0.12"
-anthropic = ">=0.34.1,<0.36.0"
+anthropic = "~0.49.0"
 ipykernel = "^6.29.5"
 jupyterlab = "^4.2.4"
 diskcache-stubs = "^5.6.3.6.20240818"
@@ -43,6 +37,10 @@ langchain-openai = "^0.2.6"
 sentence-transformers = "^3.2.1"
 transformers = "^4.45.2"
 voyageai = "^0.2.3"
+pytest = "^8.3.3"
+pytest-asyncio = "^0.24.0"
+pytest-xdist = "^3.6.1"
+ruff = "^0.7.1"
 
 [build-system]
 requires = ["poetry-core"]
The remaining files listed above with +0 -0 are unchanged between 0.8.1 and 0.8.3.