graphiti-core 0.8.2__py3-none-any.whl → 0.8.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of graphiti-core has been flagged as potentially problematic; consult the package registry's advisory page for more details.

graphiti_core/edges.py CHANGED
@@ -26,7 +26,7 @@ from pydantic import BaseModel, Field
26
26
  from typing_extensions import LiteralString
27
27
 
28
28
  from graphiti_core.embedder import EmbedderClient
29
- from graphiti_core.errors import EdgeNotFoundError, GroupsEdgesNotFoundError
29
+ from graphiti_core.errors import EdgeNotFoundError, EdgesNotFoundError, GroupsEdgesNotFoundError
30
30
  from graphiti_core.helpers import DEFAULT_DATABASE, parse_db_date
31
31
  from graphiti_core.models.edges.edge_db_queries import (
32
32
  COMMUNITY_EDGE_SAVE,
@@ -261,6 +261,9 @@ class EntityEdge(Edge):
261
261
 
262
262
  @classmethod
263
263
  async def get_by_uuids(cls, driver: AsyncDriver, uuids: list[str]):
264
+ if len(uuids) == 0:
265
+ return []
266
+
264
267
  records, _, _ = await driver.execute_query(
265
268
  """
266
269
  MATCH (n:Entity)-[e:RELATES_TO]->(m:Entity)
@@ -287,7 +290,7 @@ class EntityEdge(Edge):
287
290
  edges = [get_entity_edge_from_record(record) for record in records]
288
291
 
289
292
  if len(edges) == 0:
290
- raise EdgeNotFoundError(uuids[0])
293
+ raise EdgesNotFoundError(uuids)
291
294
  return edges
292
295
 
293
296
  @classmethod
graphiti_core/errors.py CHANGED
@@ -27,6 +27,14 @@ class EdgeNotFoundError(GraphitiError):
27
27
  super().__init__(self.message)
28
28
 
29
29
 
30
+ class EdgesNotFoundError(GraphitiError):
31
+ """Raised when a list of edges is not found."""
32
+
33
+ def __init__(self, uuids: list[str]):
34
+ self.message = f'None of the edges for {uuids} were found.'
35
+ super().__init__(self.message)
36
+
37
+
30
38
  class GroupsEdgesNotFoundError(GraphitiError):
31
39
  """Raised when no edges are found for a list of group ids."""
32
40
 
@@ -29,7 +29,7 @@ from .errors import RateLimitError
29
29
 
30
30
  logger = logging.getLogger(__name__)
31
31
 
32
- DEFAULT_MODEL = 'claude-3-5-sonnet-20240620'
32
+ DEFAULT_MODEL = 'claude-3-7-sonnet-latest'
33
33
  DEFAULT_MAX_TOKENS = 8192
34
34
 
35
35
 
@@ -58,11 +58,14 @@ class AnthropicClient(LLMClient):
58
58
  {'role': 'assistant', 'content': '{'}
59
59
  ]
60
60
 
61
+ # Ensure max_tokens is not greater than config.max_tokens or DEFAULT_MAX_TOKENS
62
+ max_tokens = min(max_tokens, self.config.max_tokens, DEFAULT_MAX_TOKENS)
63
+
61
64
  try:
62
65
  result = await self.client.messages.create(
63
66
  system='Only include JSON in the response. Do not include any additional text or explanation of the content.\n'
64
67
  + system_message.content,
65
- max_tokens=max_tokens or self.max_tokens,
68
+ max_tokens=max_tokens,
66
69
  temperature=self.temperature,
67
70
  messages=user_messages, # type: ignore
68
71
  model=self.model or DEFAULT_MODEL,
@@ -54,7 +54,11 @@ class LLMClient(ABC):
54
54
  self.temperature = config.temperature
55
55
  self.max_tokens = config.max_tokens
56
56
  self.cache_enabled = cache
57
- self.cache_dir = Cache(DEFAULT_CACHE_DIR) # Create a cache directory
57
+ self.cache_dir = None
58
+
59
+ # Only create the cache directory if caching is enabled
60
+ if self.cache_enabled:
61
+ self.cache_dir = Cache(DEFAULT_CACHE_DIR)
58
62
 
59
63
  def _clean_input(self, input: str) -> str:
60
64
  """Clean input string of invalid unicode and control characters.
@@ -129,7 +133,7 @@ class LLMClient(ABC):
129
133
  f'\n\nRespond with a JSON object in the following format:\n\n{serialized_model}'
130
134
  )
131
135
 
132
- if self.cache_enabled:
136
+ if self.cache_enabled and self.cache_dir is not None:
133
137
  cache_key = self._get_cache_key(messages)
134
138
 
135
139
  cached_response = self.cache_dir.get(cache_key)
@@ -142,7 +146,8 @@ class LLMClient(ABC):
142
146
 
143
147
  response = await self._generate_response_with_retry(messages, response_model, max_tokens)
144
148
 
145
- if self.cache_enabled:
149
+ if self.cache_enabled and self.cache_dir is not None:
150
+ cache_key = self._get_cache_key(messages)
146
151
  self.cache_dir.set(cache_key, response)
147
152
 
148
153
  return response
@@ -85,8 +85,10 @@ def summarize_context(context: dict[str, Any]) -> list[Message]:
85
85
  provided ENTITY. Summaries must be under 500 words.
86
86
 
87
87
  In addition, extract any values for the provided entity properties based on their descriptions.
88
- If the value of the entity property cannot be found in the current context, set the value of the property to None.
89
- Do not hallucinate entity property values if they cannot be found in the current context.
88
+ If the value of the entity property cannot be found in the current context, set the value of the property to the Python value None.
89
+
90
+ Guidelines:
91
+ 1. Do not hallucinate entity property values if they cannot be found in the current context.
90
92
 
91
93
  <ENTITY>
92
94
  {context['node_name']}
@@ -364,7 +364,11 @@ async def resolve_extracted_node(
364
364
  )
365
365
 
366
366
  extracted_node.summary = node_attributes_response.get('summary', '')
367
- extracted_node.attributes.update(node_attributes_response)
367
+ node_attributes = {
368
+ key: value if value != 'None' else None for key, value in node_attributes_response.items()
369
+ }
370
+
371
+ extracted_node.attributes.update(node_attributes)
368
372
 
369
373
  is_duplicate: bool = llm_response.get('is_duplicate', False)
370
374
  uuid: str | None = llm_response.get('uuid', None)
@@ -386,11 +390,12 @@ async def resolve_extracted_node(
386
390
  node.name = name
387
391
  node.summary = summary_response.get('summary', '')
388
392
 
389
- new_attributes = existing_node.attributes
393
+ new_attributes = extracted_node.attributes
390
394
  existing_attributes = existing_node.attributes
391
395
  for attribute_name, attribute_value in existing_attributes.items():
392
396
  if new_attributes.get(attribute_name) is None:
393
397
  new_attributes[attribute_name] = attribute_value
398
+ node.attributes = new_attributes
394
399
 
395
400
  uuid_map[extracted_node.uuid] = existing_node.uuid
396
401
 
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: graphiti-core
3
- Version: 0.8.2
3
+ Version: 0.8.4
4
4
  Summary: A temporal graph building library
5
5
  License: Apache-2.0
6
6
  Author: Paul Paliychuk
@@ -11,6 +11,7 @@ Classifier: Programming Language :: Python :: 3
11
11
  Classifier: Programming Language :: Python :: 3.10
12
12
  Classifier: Programming Language :: Python :: 3.11
13
13
  Classifier: Programming Language :: Python :: 3.12
14
+ Requires-Dist: anthropic (>=0.49.0,<0.50.0)
14
15
  Requires-Dist: diskcache (>=5.6.3,<6.0.0)
15
16
  Requires-Dist: neo4j (>=5.23.0,<6.0.0)
16
17
  Requires-Dist: numpy (>=1.0.0)
@@ -209,6 +210,22 @@ The `server` directory contains an API service for interacting with the Graphiti
209
210
 
210
211
  Please see the [server README](./server/README.md) for more information.
211
212
 
213
+ ## MCP Server
214
+
215
+ The `mcp_server` directory contains a Model Context Protocol (MCP) server implementation for Graphiti. This server allows AI assistants to interact with Graphiti's knowledge graph capabilities through the MCP protocol.
216
+
217
+ Key features of the MCP server include:
218
+
219
+ - Episode management (add, retrieve, delete)
220
+ - Entity management and relationship handling
221
+ - Semantic and hybrid search capabilities
222
+ - Group management for organizing related data
223
+ - Graph maintenance operations
224
+
225
+ The MCP server can be deployed using Docker with Neo4j, making it easy to integrate Graphiti into your AI assistant workflows.
226
+
227
+ For detailed setup instructions and usage examples, see the [MCP server README](./mcp_server/README.md).
228
+
212
229
  ## Optional Environment Variables
213
230
 
214
231
  In addition to the Neo4j and OpenAi-compatible credentials, Graphiti also has a few optional environment variables.
@@ -282,7 +299,7 @@ Graphiti is under active development. We aim to maintain API stability while wor
282
299
  - Allow developers to provide their own defined node and edge classes when ingesting episodes
283
300
  - Enable more flexible knowledge representation tailored to specific use cases
284
301
  - [x] Enhancing retrieval capabilities with more robust and configurable options
285
- - [ ] Graphiti MCP Server
302
+ - [x] Graphiti MCP Server
286
303
  - [ ] Expanding test coverage to ensure reliability and catch edge cases
287
304
 
288
305
  ## Contributing
@@ -3,17 +3,17 @@ graphiti_core/cross_encoder/__init__.py,sha256=hry59vz21x-AtGZ0MJ7ugw0HTwJkXiddp
3
3
  graphiti_core/cross_encoder/bge_reranker_client.py,sha256=xgXZqB_qoaWQPjnmuf1ne38YPyhhvApySKcQDaHc9R4,1435
4
4
  graphiti_core/cross_encoder/client.py,sha256=KLsbfWKOEaAV3adFe3XZlAeb-gje9_sVKCVZTaJP3ac,1441
5
5
  graphiti_core/cross_encoder/openai_reranker_client.py,sha256=e-QCftckZ7GLQgJ-ijljw6vzVa6H417cZ3cISB70M5g,4377
6
- graphiti_core/edges.py,sha256=WXJcJp4fhdlgL4sAsh3YBqkMRAvHKmmaC_7uCPLzePI,15680
6
+ graphiti_core/edges.py,sha256=aopSSb3vG260TlW5DbbjlCosmgN6fty8guVdcQMudQo,15749
7
7
  graphiti_core/embedder/__init__.py,sha256=eWd-0sPxflnYXLoWNT9sxwCIFun5JNO9Fk4E-ZXXf8Y,164
8
8
  graphiti_core/embedder/client.py,sha256=HKIlpPLnzFT81jurPkry6z8F8nxfZVfejdcfxHVUSFU,995
9
9
  graphiti_core/embedder/openai.py,sha256=23BnPA10eiaa1HkxHKYSj75-0PymczPK2FNNIz8Txbc,1910
10
10
  graphiti_core/embedder/voyage.py,sha256=7kqrLG75J3Q6cdA2Nlx1JSYtpk2141ckdl3OtDDw0vU,1882
11
- graphiti_core/errors.py,sha256=ddHrHGQxhwkVAtSph4AV84UoOlgwZufMczXPwB7uqPo,1795
11
+ graphiti_core/errors.py,sha256=cSOXXkydihNd6OHXzkwJvciRmR7EoFMq57AzUYVg0gc,2040
12
12
  graphiti_core/graphiti.py,sha256=DP2hd1aXIYh-nXVfxugHvw7Tbax50w83IKMZf9Z5BeI,29128
13
13
  graphiti_core/helpers.py,sha256=7BQzUBFmoBDA2OIDdFtoN4W-vXOhPRIsF0uDb7PsNi0,2913
14
14
  graphiti_core/llm_client/__init__.py,sha256=PA80TSMeX-sUXITXEAxMDEt3gtfZgcJrGJUcyds1mSo,207
15
- graphiti_core/llm_client/anthropic_client.py,sha256=RlD6e49XvMJsTKU0krpq46gPSFm6-hfLkkq4Sfx27BE,2574
16
- graphiti_core/llm_client/client.py,sha256=l07SpE_k18rAhK7QbPYTx3pOb2566kYauTlKIOV3rBg,5034
15
+ graphiti_core/llm_client/anthropic_client.py,sha256=dTM8rKhk9TZAU4O-0jFMivOwJvWM-gHpp5gLmuJHiGQ,2723
16
+ graphiti_core/llm_client/client.py,sha256=sqtdkySL_QnlZUyMhLvtbSPzGI1y8Ryq5a0IqOPZ2Ps,5252
17
17
  graphiti_core/llm_client/config.py,sha256=ry6ndcB0dyUDxqnGEwPBWPOjqUkHqU_n3iMyQiN3CCM,2338
18
18
  graphiti_core/llm_client/errors.py,sha256=Vk0mj2SgNDg8E8p7m1UyUaerqLPNLCDKPVsMEnOSBdQ,1028
19
19
  graphiti_core/llm_client/groq_client.py,sha256=EesX0_iFOIvvIc3ql6Xa8EOm0dbGJ_o3VpqyDM83mKg,2498
@@ -37,7 +37,7 @@ graphiti_core/prompts/invalidate_edges.py,sha256=DV2mEyIhhjc0hdKEMFLQMeG0FiUCkv_
37
37
  graphiti_core/prompts/lib.py,sha256=oxhlpGEgV15VOLEZiwirxmIJBIdfzfiyL58iyzFDskE,4254
38
38
  graphiti_core/prompts/models.py,sha256=cvx_Bv5RMFUD_5IUawYrbpOKLPHogai7_bm7YXrSz84,867
39
39
  graphiti_core/prompts/prompt_helpers.py,sha256=-9TABwIcIQUVHcNANx6wIZd-FT2DgYKyGTfx4IGYq2I,64
40
- graphiti_core/prompts/summarize_nodes.py,sha256=GxEuA1luQMCdKYYc5zHug2y2aGZhvFn4Q51kkKGM4x4,4002
40
+ graphiti_core/prompts/summarize_nodes.py,sha256=PeA1Taov5KBNNBKgrCPeF1tLg4_SMgT-Ilz2P6xbx-M,4051
41
41
  graphiti_core/py.typed,sha256=vlmmzQOt7bmeQl9L3XJP4W6Ry0iiELepnOrinKz5KQg,79
42
42
  graphiti_core/search/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
43
43
  graphiti_core/search/search.py,sha256=DX-tcIa0SiKI2HX-b_WdjGE74A8RLWQor4p90dJluUA,12643
@@ -52,10 +52,10 @@ graphiti_core/utils/maintenance/__init__.py,sha256=TRY3wWWu5kn3Oahk_KKhltrWnh0NA
52
52
  graphiti_core/utils/maintenance/community_operations.py,sha256=gIw1M5HGgc2c3TXag5ygPPpAv5WsG-yoC8Lhmfr6FMs,10011
53
53
  graphiti_core/utils/maintenance/edge_operations.py,sha256=tNw56vN586JYZMgie6RLRTiHZ680-kWzDIxW8ucL6SU,12780
54
54
  graphiti_core/utils/maintenance/graph_data_operations.py,sha256=qds9ALk9PhpQs1CNZTZGpi70mqJ93Y2KhIh9X2r8MUI,6533
55
- graphiti_core/utils/maintenance/node_operations.py,sha256=H0DAL2Qau4weIcEIrVVSdntVQtN_y4Hzoik6GldP1XA,15223
55
+ graphiti_core/utils/maintenance/node_operations.py,sha256=KR8pbQJm8ZyTlQp3sT9kKK7hZaAJ07ZM2Z9Pu7EKZts,15390
56
56
  graphiti_core/utils/maintenance/temporal_operations.py,sha256=RdNtubCyYhOVrvcOIq2WppHls1Q-BEjtsN8r38l-Rtc,3691
57
57
  graphiti_core/utils/maintenance/utils.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
58
- graphiti_core-0.8.2.dist-info/LICENSE,sha256=KCUwCyDXuVEgmDWkozHyniRyWjnWUWjkuDHfU6o3JlA,11325
59
- graphiti_core-0.8.2.dist-info/METADATA,sha256=NtjJHZn-nXyZb4ojlhL5ZLPmlWn3kk7t6j5xdv9KQPE,13585
60
- graphiti_core-0.8.2.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
61
- graphiti_core-0.8.2.dist-info/RECORD,,
58
+ graphiti_core-0.8.4.dist-info/LICENSE,sha256=KCUwCyDXuVEgmDWkozHyniRyWjnWUWjkuDHfU6o3JlA,11325
59
+ graphiti_core-0.8.4.dist-info/METADATA,sha256=keHu8L-gbL0gAzbpqfdRaSXDgZPF8cuvTpfMKrP5LoU,14351
60
+ graphiti_core-0.8.4.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
61
+ graphiti_core-0.8.4.dist-info/RECORD,,