graphiti-core 0.2.0__py3-none-any.whl → 0.2.2__py3-none-any.whl

This diff shows the content of publicly released package versions as they appear in their respective public registries, and is provided for informational purposes only.

graphiti_core/graphiti.py CHANGED
@@ -59,11 +59,6 @@ from graphiti_core.utils.maintenance.node_operations import (
     extract_nodes,
     resolve_extracted_nodes,
 )
-from graphiti_core.utils.maintenance.temporal_operations import (
-    extract_edge_dates,
-    invalidate_edges,
-    prepare_edges_for_invalidation,
-)

 logger = logging.getLogger(__name__)

@@ -180,9 +175,9 @@ class Graphiti:
         await build_indices_and_constraints(self.driver)

     async def retrieve_episodes(
-        self,
-        reference_time: datetime,
-        last_n: int = EPISODE_WINDOW_LEN,
+        self,
+        reference_time: datetime,
+        last_n: int = EPISODE_WINDOW_LEN,
     ) -> list[EpisodicNode]:
         """
         Retrieve the last n episodic nodes from the graph.
@@ -210,14 +205,14 @@ class Graphiti:
         return await retrieve_episodes(self.driver, reference_time, last_n)

     async def add_episode(
-        self,
-        name: str,
-        episode_body: str,
-        source_description: str,
-        reference_time: datetime,
-        source: EpisodeType = EpisodeType.message,
-        success_callback: Callable | None = None,
-        error_callback: Callable | None = None,
+        self,
+        name: str,
+        episode_body: str,
+        source_description: str,
+        reference_time: datetime,
+        source: EpisodeType = EpisodeType.message,
+        success_callback: Callable | None = None,
+        error_callback: Callable | None = None,
     ):
         """
         Process an episode and update the graph.
@@ -293,7 +288,7 @@ class Graphiti:
                 *[node.generate_name_embedding(embedder) for node in extracted_nodes]
             )

-            # Resolve extracted nodes with nodes already in the graph
+            # Resolve extracted nodes with nodes already in the graph and extract facts
             existing_nodes_lists: list[list[EntityNode]] = list(
                 await asyncio.gather(
                     *[get_relevant_nodes([node], self.driver) for node in extracted_nodes]
@@ -302,99 +297,96 @@ class Graphiti:
             )

             logger.info(f'Extracted nodes: {[(n.name, n.uuid) for n in extracted_nodes]}')

-            mentioned_nodes, _ = await resolve_extracted_nodes(
-                self.llm_client, extracted_nodes, existing_nodes_lists
+            (mentioned_nodes, uuid_map), extracted_edges = await asyncio.gather(
+                resolve_extracted_nodes(self.llm_client, extracted_nodes, existing_nodes_lists),
+                extract_edges(self.llm_client, episode, extracted_nodes, previous_episodes),
             )
             logger.info(f'Adjusted mentioned nodes: {[(n.name, n.uuid) for n in mentioned_nodes]}')
             nodes.extend(mentioned_nodes)

-            # Extract facts as edges given entity nodes
-            extracted_edges = await extract_edges(
-                self.llm_client, episode, mentioned_nodes, previous_episodes
+            extracted_edges_with_resolved_pointers = resolve_edge_pointers(
+                extracted_edges, uuid_map
             )

             # calculate embeddings
-            await asyncio.gather(*[edge.generate_embedding(embedder) for edge in extracted_edges])
+            await asyncio.gather(
+                *[
+                    edge.generate_embedding(embedder)
+                    for edge in extracted_edges_with_resolved_pointers
+                ]
+            )

-            # Resolve extracted edges with edges already in the graph
-            existing_edges_list: list[list[EntityEdge]] = list(
+            # Resolve extracted edges with related edges already in the graph
+            related_edges_list: list[list[EntityEdge]] = list(
                 await asyncio.gather(
                     *[
                         get_relevant_edges(
-                            [edge],
                             self.driver,
-                            RELEVANT_SCHEMA_LIMIT,
+                            [edge],
                             edge.source_node_uuid,
                             edge.target_node_uuid,
+                            RELEVANT_SCHEMA_LIMIT,
                         )
-                        for edge in extracted_edges
+                        for edge in extracted_edges_with_resolved_pointers
                     ]
                 )
             )
             logger.info(
-                f'Existing edges lists: {[(e.name, e.uuid) for edges_lst in existing_edges_list for e in edges_lst]}'
+                f'Related edges lists: {[(e.name, e.uuid) for edges_lst in related_edges_list for e in edges_lst]}'
             )
-            logger.info(f'Extracted edges: {[(e.name, e.uuid) for e in extracted_edges]}')
-
-            deduped_edges: list[EntityEdge] = await resolve_extracted_edges(
-                self.llm_client, extracted_edges, existing_edges_list
+            logger.info(
+                f'Extracted edges: {[(e.name, e.uuid) for e in extracted_edges_with_resolved_pointers]}'
             )

-            # Extract dates for the newly extracted edges
-            edge_dates = await asyncio.gather(
-                *[
-                    extract_edge_dates(
-                        self.llm_client,
-                        edge,
-                        episode,
-                        previous_episodes,
-                    )
-                    for edge in deduped_edges
-                ]
+            existing_source_edges_list: list[list[EntityEdge]] = list(
+                await asyncio.gather(
+                    *[
+                        get_relevant_edges(
+                            self.driver,
+                            [edge],
+                            edge.source_node_uuid,
+                            None,
+                            RELEVANT_SCHEMA_LIMIT,
+                        )
+                        for edge in extracted_edges_with_resolved_pointers
+                    ]
+                )
             )

-            for i, edge in enumerate(deduped_edges):
-                valid_at = edge_dates[i][0]
-                invalid_at = edge_dates[i][1]
-
-                edge.valid_at = valid_at
-                edge.invalid_at = invalid_at
-                if edge.invalid_at is not None:
-                    edge.expired_at = now
-
-            entity_edges.extend(deduped_edges)
+            existing_target_edges_list: list[list[EntityEdge]] = list(
+                await asyncio.gather(
+                    *[
+                        get_relevant_edges(
+                            self.driver,
+                            [edge],
+                            None,
+                            edge.target_node_uuid,
+                            RELEVANT_SCHEMA_LIMIT,
+                        )
+                        for edge in extracted_edges_with_resolved_pointers
+                    ]
+                )
+            )

-            existing_edges: list[EntityEdge] = [
-                e for edge_lst in existing_edges_list for e in edge_lst
+            existing_edges_list: list[list[EntityEdge]] = [
+                source_lst + target_lst
+                for source_lst, target_lst in zip(
+                    existing_source_edges_list, existing_target_edges_list
+                )
             ]

-            (
-                old_edges_with_nodes_pending_invalidation,
-                new_edges_with_nodes,
-            ) = prepare_edges_for_invalidation(
-                existing_edges=existing_edges, new_edges=deduped_edges, nodes=nodes
-            )
-
-            invalidated_edges = await invalidate_edges(
+            resolved_edges, invalidated_edges = await resolve_extracted_edges(
                 self.llm_client,
-                old_edges_with_nodes_pending_invalidation,
-                new_edges_with_nodes,
+                extracted_edges_with_resolved_pointers,
+                related_edges_list,
+                existing_edges_list,
                 episode,
                 previous_episodes,
             )

-            for edge in invalidated_edges:
-                for existing_edge in existing_edges:
-                    if existing_edge.uuid == edge.uuid:
-                        existing_edge.expired_at = edge.expired_at
-                for deduped_edge in deduped_edges:
-                    if deduped_edge.uuid == edge.uuid:
-                        deduped_edge.expired_at = edge.expired_at
-            logger.info(f'Invalidated edges: {[(e.name, e.uuid) for e in invalidated_edges]}')
-
-            entity_edges.extend(existing_edges)
+            entity_edges.extend(resolved_edges + invalidated_edges)

-            logger.info(f'Deduped edges: {[(e.name, e.uuid) for e in deduped_edges]}')
+            logger.info(f'Resolved edges: {[(e.name, e.uuid) for e in resolved_edges]}')

             episodic_edges: list[EpisodicEdge] = build_episodic_edges(
                 mentioned_nodes,
@@ -422,8 +414,8 @@ class Graphiti:
             raise e

     async def add_episode_bulk(
-        self,
-        bulk_episodes: list[RawEpisode],
+        self,
+        bulk_episodes: list[RawEpisode],
     ):
         """
         Process multiple episodes in bulk and update the graph.
@@ -587,18 +579,18 @@ class Graphiti:
         return edges

     async def _search(
-        self,
-        query: str,
-        timestamp: datetime,
-        config: SearchConfig,
-        center_node_uuid: str | None = None,
+        self,
+        query: str,
+        timestamp: datetime,
+        config: SearchConfig,
+        center_node_uuid: str | None = None,
     ):
         return await hybrid_search(
             self.driver, self.llm_client.get_embedder(), query, timestamp, config, center_node_uuid
         )

     async def get_nodes_by_query(
-        self, query: str, limit: int = RELEVANT_SCHEMA_LIMIT
+        self, query: str, limit: int = RELEVANT_SCHEMA_LIMIT
     ) -> list[EntityNode]:
         """
         Retrieve nodes from the graph database based on a text query.
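The net effect of these graphiti.py changes: node resolution and edge extraction now run concurrently, extracted edges are re-pointed from provisional to canonical node uuids via uuid_map, and the old dedupe / date-extraction / invalidation sequence collapses into a single resolve_extracted_edges call that returns both resolved and invalidated edges. A minimal sketch of the same fan-out-and-remap pattern, using stand-in coroutines rather than the real Graphiti internals:

    import asyncio

    # Stand-ins for resolve_extracted_nodes and extract_edges; the real calls
    # take an LLM client, the episode, and previous episodes.
    async def resolve_nodes(names):
        return names, {'tmp-1': 'canonical-1'}  # (resolved nodes, uuid_map)

    async def extract_edges_stub(episode_body):
        return [('tmp-1', 'WORKS_AT', 'tmp-2')]

    async def main():
        # The two steps are independent, so 0.2.2 runs them concurrently.
        (nodes, uuid_map), edges = await asyncio.gather(
            resolve_nodes(['Alice']),
            extract_edges_stub('Alice started at Acme'),
        )
        # resolve_edge_pointers then remaps provisional uuids to canonical ones.
        remapped = [(uuid_map.get(s, s), r, uuid_map.get(t, t)) for s, r, t in edges]
        print(remapped)  # [('canonical-1', 'WORKS_AT', 'tmp-2')]

    asyncio.run(main())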
graphiti_core/nodes.py CHANGED
@@ -225,7 +225,8 @@ class EntityNode(Node):
         MATCH (n:Entity {uuid: $uuid})
         RETURN
             n.uuid As uuid,
-            n.name AS name,
+            n.name AS name,
+            n.name_embedding AS name_embedding,
             n.created_at AS created_at,
             n.summary AS summary
         """,
@@ -239,6 +240,7 @@ class EntityNode(Node):
             EntityNode(
                 uuid=record['uuid'],
                 name=record['name'],
+                name_embedding=record['name_embedding'],
                 labels=['Entity'],
                 created_at=record['created_at'].to_native(),
                 summary=record['summary'],
@@ -248,3 +250,6 @@ class EntityNode(Node):
         logger.info(f'Found Node: {uuid}')

         return nodes[0]
+
+
+# Node helpers
graphiti_core/prompts/dedupe_edges.py CHANGED
@@ -129,7 +129,7 @@ def v3(context: dict[str, Any]) -> list[Message]:
         Given the following context, determine whether the New Edge represents any of the edges in the list of Existing Edges.

         Existing Edges:
-        {json.dumps(context['existing_edges'], indent=2)}
+        {json.dumps(context['related_edges'], indent=2)}

         New Edge:
         {json.dumps(context['extracted_edges'], indent=2)}
graphiti_core/prompts/invalidate_edges.py CHANGED
@@ -21,10 +21,12 @@ from .models import Message, PromptFunction, PromptVersion

 class Prompt(Protocol):
     v1: PromptVersion
+    v2: PromptVersion


 class Versions(TypedDict):
     v1: PromptFunction
+    v2: PromptFunction


 def v1(context: dict[str, Any]) -> list[Message]:
@@ -71,4 +73,38 @@ def v1(context: dict[str, Any]) -> list[Message]:
     ]


-versions: Versions = {'v1': v1}
+def v2(context: dict[str, Any]) -> list[Message]:
+    return [
+        Message(
+            role='system',
+            content='You are an AI assistant that helps determine which relationships in a knowledge graph should be invalidated based solely on explicit contradictions in newer information.',
+        ),
+        Message(
+            role='user',
+            content=f"""
+        Based on the provided Existing Edges and a New Edge, determine which existing edges, if any, should be marked as invalidated due to contradictions with the New Edge.
+
+        Existing Edges:
+        {context['existing_edges']}
+
+        New Edge:
+        {context['new_edge']}
+
+
+        For each existing edge that should be invalidated, respond with a JSON object in the following format:
+        {{
+            "invalidated_edges": [
+                {{
+                    "uuid": "The UUID of the edge to be invalidated",
+                    "fact": "Updated fact of the edge"
+                }}
+            ]
+        }}
+
+        If no relationships need to be invalidated based on these strict criteria, return an empty list for "invalidated_edges".
+        """,
+        ),
+    ]
+
+
+versions: Versions = {'v1': v1, 'v2': v2}
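The v2 prompt is consumed by the new get_edge_contradictions helper in temporal_operations.py (shown later in this diff). A toy invocation with made-up edge data, just to show the context shape v2 expects:

    context = {
        'new_edge': {'uuid': 'e3', 'name': 'WORKS_AT', 'fact': 'Alice works at Acme'},
        'existing_edges': [
            {'uuid': 'e1', 'name': 'WORKS_AT', 'fact': 'Alice works at Initech'},
        ],
    }

    # Renders a system message plus a user message embedding the edges; the
    # model is expected to reply with an 'invalidated_edges' JSON object.
    for message in v2(context):
        print(message.role, message.content[:80])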
graphiti_core/search/search.py CHANGED
@@ -83,7 +83,7 @@ async def hybrid_search(
         nodes.extend(await get_mentioned_nodes(driver, episodes))

     if SearchMethod.bm25 in config.search_methods:
-        text_search = await edge_fulltext_search(driver, query, 2 * config.num_edges)
+        text_search = await edge_fulltext_search(driver, query, None, None, 2 * config.num_edges)
         search_results.append(text_search)

     if SearchMethod.cosine_similarity in config.search_methods:
@@ -95,7 +95,7 @@ async def hybrid_search(
         )

         similarity_search = await edge_similarity_search(
-            driver, search_vector, 2 * config.num_edges
+            driver, search_vector, None, None, 2 * config.num_edges
         )
         search_results.append(similarity_search)

graphiti_core/search/search_utils.py CHANGED
@@ -1,11 +1,11 @@
 import asyncio
 import logging
 import re
-import typing
 from collections import defaultdict
 from time import time
+from typing import Any

-from neo4j import AsyncDriver
+from neo4j import AsyncDriver, Query

 from graphiti_core.edges import EntityEdge
 from graphiti_core.helpers import parse_db_date
@@ -23,7 +23,8 @@ async def get_mentioned_nodes(driver: AsyncDriver, episodes: list[EpisodicNode])
         MATCH (episode:Episodic)-[:MENTIONS]->(n:Entity) WHERE episode.uuid IN $uuids
         RETURN DISTINCT
             n.uuid As uuid,
-            n.name AS name,
+            n.name AS name,
+            n.name_embedding AS name_embedding,
             n.created_at AS created_at,
             n.summary AS summary
         """,
@@ -37,6 +38,7 @@ async def get_mentioned_nodes(driver: AsyncDriver, episodes: list[EpisodicNode])
         EntityNode(
             uuid=record['uuid'],
             name=record['name'],
+            name_embedding=record['name_embedding'],
             labels=['Entity'],
             created_at=record['created_at'].to_native(),
             summary=record['summary'],
@@ -66,12 +68,12 @@ async def bfs(node_ids: list[str], driver: AsyncDriver):
             r.expired_at AS expired_at,
             r.valid_at AS valid_at,
             r.invalid_at AS invalid_at
-
+
         """,
         node_ids=node_ids,
     )

-    context: dict[str, typing.Any] = {}
+    context: dict[str, Any] = {}

     for record in records:
         n_uuid = record['source_node_uuid']
@@ -98,13 +100,12 @@ async def bfs(node_ids: list[str], driver: AsyncDriver):
 async def edge_similarity_search(
     driver: AsyncDriver,
     search_vector: list[float],
+    source_node_uuid: str | None,
+    target_node_uuid: str | None,
     limit: int = RELEVANT_SCHEMA_LIMIT,
-    source_node_uuid: str = '*',
-    target_node_uuid: str = '*',
 ) -> list[EntityEdge]:
     # vector similarity search over embedded facts
-    records, _, _ = await driver.execute_query(
-        """
+    query = Query("""
         CALL db.index.vector.queryRelationships("fact_embedding", $limit, $search_vector)
         YIELD relationship AS rel, score
         MATCH (n:Entity {uuid: $source_uuid})-[r {uuid: rel.uuid}]-(m:Entity {uuid: $target_uuid})
@@ -121,7 +122,68 @@ async def edge_similarity_search(
             r.valid_at AS valid_at,
             r.invalid_at AS invalid_at
         ORDER BY score DESC
-        """,
+        """)
+
+    if source_node_uuid is None and target_node_uuid is None:
+        query = Query("""
+        CALL db.index.vector.queryRelationships("fact_embedding", $limit, $search_vector)
+        YIELD relationship AS rel, score
+        MATCH (n:Entity)-[r {uuid: rel.uuid}]-(m:Entity)
+        RETURN
+            r.uuid AS uuid,
+            n.uuid AS source_node_uuid,
+            m.uuid AS target_node_uuid,
+            r.created_at AS created_at,
+            r.name AS name,
+            r.fact AS fact,
+            r.fact_embedding AS fact_embedding,
+            r.episodes AS episodes,
+            r.expired_at AS expired_at,
+            r.valid_at AS valid_at,
+            r.invalid_at AS invalid_at
+        ORDER BY score DESC
+        """)
+    elif source_node_uuid is None:
+        query = Query("""
+        CALL db.index.vector.queryRelationships("fact_embedding", $limit, $search_vector)
+        YIELD relationship AS rel, score
+        MATCH (n:Entity)-[r {uuid: rel.uuid}]-(m:Entity {uuid: $target_uuid})
+        RETURN
+            r.uuid AS uuid,
+            n.uuid AS source_node_uuid,
+            m.uuid AS target_node_uuid,
+            r.created_at AS created_at,
+            r.name AS name,
+            r.fact AS fact,
+            r.fact_embedding AS fact_embedding,
+            r.episodes AS episodes,
+            r.expired_at AS expired_at,
+            r.valid_at AS valid_at,
+            r.invalid_at AS invalid_at
+        ORDER BY score DESC
+        """)
+    elif target_node_uuid is None:
+        query = Query("""
+        CALL db.index.vector.queryRelationships("fact_embedding", $limit, $search_vector)
+        YIELD relationship AS rel, score
+        MATCH (n:Entity {uuid: $source_uuid})-[r {uuid: rel.uuid}]-(m:Entity)
+        RETURN
+            r.uuid AS uuid,
+            n.uuid AS source_node_uuid,
+            m.uuid AS target_node_uuid,
+            r.created_at AS created_at,
+            r.name AS name,
+            r.fact AS fact,
+            r.fact_embedding AS fact_embedding,
+            r.episodes AS episodes,
+            r.expired_at AS expired_at,
+            r.valid_at AS valid_at,
+            r.invalid_at AS invalid_at
+        ORDER BY score DESC
+        """)
+
+    records, _, _ = await driver.execute_query(
+        query,
         search_vector=search_vector,
         source_uuid=source_node_uuid,
         target_uuid=target_node_uuid,
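The reason for four near-identical queries: a Cypher MATCH cannot parameterize away an entire property filter, so when a source or target uuid is unknown (None) the clause itself has to change, not just the bound value; the old '*' defaults would only have matched a node whose uuid property was literally '*'. A condensed sketch of the same dispatch, using a hypothetical helper (not part of the package) that builds only the MATCH clause:

    def build_match(source_uuid: str | None, target_uuid: str | None) -> str:
        # Each endpoint is either pinned to a bound parameter or left unconstrained.
        n = '(n:Entity {uuid: $source_uuid})' if source_uuid is not None else '(n:Entity)'
        m = '(m:Entity {uuid: $target_uuid})' if target_uuid is not None else '(m:Entity)'
        return f'MATCH {n}-[r {{uuid: rel.uuid}}]-{m}'

    print(build_match('abc', None))
    # MATCH (n:Entity {uuid: $source_uuid})-[r {uuid: rel.uuid}]-(m:Entity)
    print(build_match(None, None))
    # MATCH (n:Entity)-[r {uuid: rel.uuid}]-(m:Entity)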
@@ -161,6 +223,7 @@ async def entity_similarity_search(
         RETURN
             n.uuid As uuid,
             n.name AS name,
+            n.name_embedding AS name_embedding,
             n.created_at AS created_at,
             n.summary AS summary
         ORDER BY score DESC
@@ -175,6 +238,7 @@ async def entity_similarity_search(
         EntityNode(
             uuid=record['uuid'],
             name=record['name'],
+            name_embedding=record['name_embedding'],
             labels=['Entity'],
             created_at=record['created_at'].to_native(),
             summary=record['summary'],
@@ -193,8 +257,9 @@ async def entity_fulltext_search(
         """
         CALL db.index.fulltext.queryNodes("name_and_summary", $query) YIELD node, score
         RETURN
-            node.uuid As uuid,
+            node.uuid AS uuid,
             node.name AS name,
+            node.name_embedding AS name_embedding,
             node.created_at AS created_at,
             node.summary AS summary
         ORDER BY score DESC
@@ -210,6 +275,7 @@ async def entity_fulltext_search(
         EntityNode(
             uuid=record['uuid'],
             name=record['name'],
+            name_embedding=record['name_embedding'],
             labels=['Entity'],
             created_at=record['created_at'].to_native(),
             summary=record['summary'],
@@ -222,19 +288,16 @@ async def entity_fulltext_search(
 async def edge_fulltext_search(
     driver: AsyncDriver,
     query: str,
+    source_node_uuid: str | None,
+    target_node_uuid: str | None,
     limit=RELEVANT_SCHEMA_LIMIT,
-    source_node_uuid: str = '*',
-    target_node_uuid: str = '*',
 ) -> list[EntityEdge]:
     # fulltext search over facts
-    fuzzy_query = re.sub(r'[^\w\s]', '', query) + '~'
-
-    records, _, _ = await driver.execute_query(
-        """
-        CALL db.index.fulltext.queryRelationships("name_and_fact", $query)
-        YIELD relationship AS rel, score
-        MATCH (n:Entity {uuid: $source_uuid})-[r {uuid: rel.uuid}]-(m:Entity {uuid: $target_uuid})
-        RETURN
+    cypher_query = Query("""
+        CALL db.index.fulltext.queryRelationships("name_and_fact", $query)
+        YIELD relationship AS rel, score
+        MATCH (n:Entity {uuid: $source_uuid})-[r {uuid: rel.uuid}]-(m:Entity {uuid: $target_uuid})
+        RETURN
             r.uuid AS uuid,
             n.uuid AS source_node_uuid,
             m.uuid AS target_node_uuid,
@@ -247,7 +310,70 @@ async def edge_fulltext_search(
             r.valid_at AS valid_at,
             r.invalid_at AS invalid_at
         ORDER BY score DESC LIMIT $limit
-        """,
+        """)
+
+    if source_node_uuid is None and target_node_uuid is None:
+        cypher_query = Query("""
+        CALL db.index.fulltext.queryRelationships("name_and_fact", $query)
+        YIELD relationship AS rel, score
+        MATCH (n:Entity)-[r {uuid: rel.uuid}]-(m:Entity)
+        RETURN
+            r.uuid AS uuid,
+            n.uuid AS source_node_uuid,
+            m.uuid AS target_node_uuid,
+            r.created_at AS created_at,
+            r.name AS name,
+            r.fact AS fact,
+            r.fact_embedding AS fact_embedding,
+            r.episodes AS episodes,
+            r.expired_at AS expired_at,
+            r.valid_at AS valid_at,
+            r.invalid_at AS invalid_at
+        ORDER BY score DESC LIMIT $limit
+        """)
+    elif source_node_uuid is None:
+        cypher_query = Query("""
+        CALL db.index.fulltext.queryRelationships("name_and_fact", $query)
+        YIELD relationship AS rel, score
+        MATCH (n:Entity)-[r {uuid: rel.uuid}]-(m:Entity {uuid: $target_uuid})
+        RETURN
+            r.uuid AS uuid,
+            n.uuid AS source_node_uuid,
+            m.uuid AS target_node_uuid,
+            r.created_at AS created_at,
+            r.name AS name,
+            r.fact AS fact,
+            r.fact_embedding AS fact_embedding,
+            r.episodes AS episodes,
+            r.expired_at AS expired_at,
+            r.valid_at AS valid_at,
+            r.invalid_at AS invalid_at
+        ORDER BY score DESC LIMIT $limit
+        """)
+    elif target_node_uuid is None:
+        cypher_query = Query("""
+        CALL db.index.fulltext.queryRelationships("name_and_fact", $query)
+        YIELD relationship AS rel, score
+        MATCH (n:Entity {uuid: $source_uuid})-[r {uuid: rel.uuid}]-(m:Entity)
+        RETURN
+            r.uuid AS uuid,
+            n.uuid AS source_node_uuid,
+            m.uuid AS target_node_uuid,
+            r.created_at AS created_at,
+            r.name AS name,
+            r.fact AS fact,
+            r.fact_embedding AS fact_embedding,
+            r.episodes AS episodes,
+            r.expired_at AS expired_at,
+            r.valid_at AS valid_at,
+            r.invalid_at AS invalid_at
+        ORDER BY score DESC LIMIT $limit
+        """)
+
+    fuzzy_query = re.sub(r'[^\w\s]', '', query) + '~'
+
+    records, _, _ = await driver.execute_query(
+        cypher_query,
         query=fuzzy_query,
         source_uuid=source_node_uuid,
         target_uuid=target_node_uuid,
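edge_fulltext_search still sanitizes the raw query before handing it to the Lucene fulltext index: punctuation is stripped with a regex and a trailing '~' requests fuzzy matching. A quick standalone illustration of that transformation:

    import re

    query = 'Where does Alice work, exactly?'
    fuzzy_query = re.sub(r'[^\w\s]', '', query) + '~'
    print(fuzzy_query)  # Where does Alice work exactly~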
@@ -286,7 +412,7 @@ async def hybrid_node_search(
     Perform a hybrid search for nodes using both text queries and embeddings.

     This method combines fulltext search and vector similarity search to find
-    relevant nodes in the graph database. It uses an rrf reranker.
+    relevant nodes in the graph database. It uses a rrf reranker.

     Parameters
     ----------
@@ -379,11 +505,11 @@ async def get_relevant_nodes(


 async def get_relevant_edges(
-    edges: list[EntityEdge],
     driver: AsyncDriver,
+    edges: list[EntityEdge],
+    source_node_uuid: str | None,
+    target_node_uuid: str | None,
     limit: int = RELEVANT_SCHEMA_LIMIT,
-    source_node_uuid: str = '*',
-    target_node_uuid: str = '*',
 ) -> list[EntityEdge]:
     start = time()
     relevant_edges: list[EntityEdge] = []
@@ -392,13 +518,13 @@ async def get_relevant_edges(
     results = await asyncio.gather(
         *[
             edge_similarity_search(
-                driver, edge.fact_embedding, limit, source_node_uuid, target_node_uuid
+                driver, edge.fact_embedding, source_node_uuid, target_node_uuid, limit
             )
             for edge in edges
             if edge.fact_embedding is not None
         ],
         *[
-            edge_fulltext_search(driver, edge.fact, limit, source_node_uuid, target_node_uuid)
+            edge_fulltext_search(driver, edge.fact, source_node_uuid, target_node_uuid, limit)
             for edge in edges
         ],
     )
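Note the reordered signature: driver first, then edges, and the endpoint uuids are now required positional arguments (pass None to leave an endpoint unconstrained), with limit last. A hedged sketch of the 0.2.2 call shapes, matching how graphiti.py and bulk_utils.py invoke it; `driver` and `edge` are assumed to exist, and RELEVANT_SCHEMA_LIMIT is assumed importable from the same module:

    from graphiti_core.search.search_utils import RELEVANT_SCHEMA_LIMIT, get_relevant_edges

    async def related_and_candidates(driver, edge):
        # Both endpoints pinned: dedupe against edges between the same entity pair.
        related = await get_relevant_edges(
            driver, [edge], edge.source_node_uuid, edge.target_node_uuid, RELEVANT_SCHEMA_LIMIT
        )
        # Source pinned only: invalidation candidates sharing the source node.
        from_source = await get_relevant_edges(
            driver, [edge], edge.source_node_uuid, None, RELEVANT_SCHEMA_LIMIT
        )
        return related, from_source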
@@ -440,7 +566,7 @@ async def node_distance_reranker(
     scores: dict[str, float] = {}

     for uuid in sorted_uuids:
-        # Find shortest path to center node
+        # Find the shortest path to center node
         records, _, _ = await driver.execute_query(
             """
             MATCH (source:Entity)-[r:RELATES_TO {uuid: $edge_uuid}]->(target:Entity)
graphiti_core/utils/bulk_utils.py CHANGED
@@ -158,7 +158,7 @@ async def dedupe_edges_bulk(

     relevant_edges_chunks: list[list[EntityEdge]] = list(
         await asyncio.gather(
-            *[get_relevant_edges(edge_chunk, driver) for edge_chunk in edge_chunks]
+            *[get_relevant_edges(driver, edge_chunk, None, None) for edge_chunk in edge_chunks]
         )
     )
graphiti_core/utils/maintenance/edge_operations.py CHANGED
@@ -24,6 +24,10 @@ from graphiti_core.edges import EntityEdge, EpisodicEdge
 from graphiti_core.llm_client import LLMClient
 from graphiti_core.nodes import EntityNode, EpisodicNode
 from graphiti_core.prompts import prompt_library
+from graphiti_core.utils.maintenance.temporal_operations import (
+    extract_edge_dates,
+    get_edge_contradictions,
+)

 logger = logging.getLogger(__name__)

@@ -149,28 +153,110 @@ async def dedupe_extracted_edges(
 async def resolve_extracted_edges(
     llm_client: LLMClient,
     extracted_edges: list[EntityEdge],
+    related_edges_lists: list[list[EntityEdge]],
     existing_edges_lists: list[list[EntityEdge]],
-) -> list[EntityEdge]:
-    resolved_edges: list[EntityEdge] = list(
+    current_episode: EpisodicNode,
+    previous_episodes: list[EpisodicNode],
+) -> tuple[list[EntityEdge], list[EntityEdge]]:
+    # resolve edges with related edges in the graph, extract temporal information, and find invalidation candidates
+    results: list[tuple[EntityEdge, list[EntityEdge]]] = list(
         await asyncio.gather(
             *[
-                resolve_extracted_edge(llm_client, extracted_edge, existing_edges)
-                for extracted_edge, existing_edges in zip(extracted_edges, existing_edges_lists)
+                resolve_extracted_edge(
+                    llm_client,
+                    extracted_edge,
+                    related_edges,
+                    existing_edges,
+                    current_episode,
+                    previous_episodes,
+                )
+                for extracted_edge, related_edges, existing_edges in zip(
+                    extracted_edges, related_edges_lists, existing_edges_lists
+                )
             ]
         )
     )

-    return resolved_edges
+    resolved_edges: list[EntityEdge] = []
+    invalidated_edges: list[EntityEdge] = []
+    for result in results:
+        resolved_edge = result[0]
+        invalidated_edge_chunk = result[1]
+
+        resolved_edges.append(resolved_edge)
+        invalidated_edges.extend(invalidated_edge_chunk)
+
+    return resolved_edges, invalidated_edges


 async def resolve_extracted_edge(
-    llm_client: LLMClient, extracted_edge: EntityEdge, existing_edges: list[EntityEdge]
+    llm_client: LLMClient,
+    extracted_edge: EntityEdge,
+    related_edges: list[EntityEdge],
+    existing_edges: list[EntityEdge],
+    current_episode: EpisodicNode,
+    previous_episodes: list[EpisodicNode],
+) -> tuple[EntityEdge, list[EntityEdge]]:
+    resolved_edge, (valid_at, invalid_at), invalidation_candidates = await asyncio.gather(
+        dedupe_extracted_edge(llm_client, extracted_edge, related_edges),
+        extract_edge_dates(llm_client, extracted_edge, current_episode, previous_episodes),
+        get_edge_contradictions(llm_client, extracted_edge, existing_edges),
+    )
+
+    now = datetime.now()
+
+    resolved_edge.valid_at = valid_at if valid_at is not None else resolved_edge.valid_at
+    resolved_edge.invalid_at = invalid_at if invalid_at is not None else resolved_edge.invalid_at
+    if invalid_at is not None and resolved_edge.expired_at is None:
+        resolved_edge.expired_at = now
+
+    # Determine if the new_edge needs to be expired
+    if resolved_edge.expired_at is None:
+        invalidation_candidates.sort(key=lambda c: (c.valid_at is None, c.valid_at))
+        for candidate in invalidation_candidates:
+            if (
+                candidate.valid_at is not None and resolved_edge.valid_at is not None
+            ) and candidate.valid_at > resolved_edge.valid_at:
+                # Expire new edge since we have information about more recent events
+                resolved_edge.invalid_at = candidate.valid_at
+                resolved_edge.expired_at = now
+                break
+
+    # Determine which contradictory edges need to be expired
+    invalidated_edges: list[EntityEdge] = []
+    for edge in invalidation_candidates:
+        # (Edge invalid before new edge becomes valid) or (new edge invalid before edge becomes valid)
+        if (
+            edge.invalid_at is not None
+            and resolved_edge.valid_at is not None
+            and edge.invalid_at < resolved_edge.valid_at
+        ) or (
+            edge.valid_at is not None
+            and resolved_edge.invalid_at is not None
+            and resolved_edge.invalid_at < edge.valid_at
+        ):
+            continue
+        # New edge invalidates edge
+        elif (
+            edge.valid_at is not None
+            and resolved_edge.valid_at is not None
+            and edge.valid_at < resolved_edge.valid_at
+        ):
+            edge.invalid_at = resolved_edge.valid_at
+            edge.expired_at = edge.expired_at if edge.expired_at is not None else now
+            invalidated_edges.append(edge)
+
+    return resolved_edge, invalidated_edges
+
+
+async def dedupe_extracted_edge(
+    llm_client: LLMClient, extracted_edge: EntityEdge, related_edges: list[EntityEdge]
 ) -> EntityEdge:
     start = time()

     # Prepare context for LLM
-    existing_edges_context = [
-        {'uuid': edge.uuid, 'name': edge.name, 'fact': edge.fact} for edge in existing_edges
+    related_edges_context = [
+        {'uuid': edge.uuid, 'name': edge.name, 'fact': edge.fact} for edge in related_edges
     ]

     extracted_edge_context = {
@@ -180,7 +266,7 @@ async def resolve_extracted_edge(
     }

     context = {
-        'existing_edges': existing_edges_context,
+        'related_edges': related_edges_context,
         'extracted_edges': extracted_edge_context,
     }

@@ -191,14 +277,14 @@ async def resolve_extracted_edge(
     edge = extracted_edge
     if is_duplicate:
-        for existing_edge in existing_edges:
+        for existing_edge in related_edges:
             if existing_edge.uuid != uuid:
                 continue
             edge = existing_edge

     end = time()
     logger.info(
-        f'Resolved node: {extracted_edge.name} is {edge.name}, in {(end - start) * 1000} ms'
+        f'Resolved Edge: {extracted_edge.name} is {edge.name}, in {(end - start) * 1000} ms'
     )

     return edge
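The datetime comparisons in resolve_extracted_edge can be checked in isolation. A self-contained sketch with plain dicts standing in for EntityEdge, mirroring the two rules above: a candidate whose validity interval cannot overlap the new edge's is skipped, while a candidate that became valid before the new edge did gets closed by it:

    from datetime import datetime

    new_valid_at = datetime(2024, 6, 1)  # new fact: Alice works at Acme
    candidates = [
        {'fact': 'Alice works at Initech', 'valid_at': datetime(2023, 1, 1), 'invalid_at': None},
        {'fact': 'Alice worked at Hooli', 'valid_at': datetime(2020, 1, 1),
         'invalid_at': datetime(2021, 1, 1)},  # closed before the new fact began
    ]

    invalidated = []
    for c in candidates:
        # Non-overlapping: the candidate ended before the new fact became valid.
        if c['invalid_at'] is not None and c['invalid_at'] < new_valid_at:
            continue
        # The candidate started earlier, so the new fact invalidates it.
        if c['valid_at'] is not None and c['valid_at'] < new_valid_at:
            c['invalid_at'] = new_valid_at
            c['expired_at'] = datetime.now()
            invalidated.append(c)

    print([c['fact'] for c in invalidated])  # ['Alice works at Initech']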
graphiti_core/utils/maintenance/node_operations.py CHANGED
@@ -28,7 +28,7 @@ logger = logging.getLogger(__name__)


 async def extract_message_nodes(
-    llm_client: LLMClient, episode: EpisodicNode, previous_episodes: list[EpisodicNode]
+    llm_client: LLMClient, episode: EpisodicNode, previous_episodes: list[EpisodicNode]
 ) -> list[dict[str, Any]]:
     # Prepare context for LLM
     context = {
@@ -49,8 +49,8 @@ async def extract_message_nodes(


 async def extract_json_nodes(
-    llm_client: LLMClient,
-    episode: EpisodicNode,
+    llm_client: LLMClient,
+    episode: EpisodicNode,
 ) -> list[dict[str, Any]]:
     # Prepare context for LLM
     context = {
@@ -67,9 +67,9 @@ async def extract_json_nodes(


 async def extract_nodes(
-    llm_client: LLMClient,
-    episode: EpisodicNode,
-    previous_episodes: list[EpisodicNode],
+    llm_client: LLMClient,
+    episode: EpisodicNode,
+    previous_episodes: list[EpisodicNode],
 ) -> list[EntityNode]:
     start = time()
     extracted_node_data: list[dict[str, Any]] = []
@@ -96,9 +96,9 @@ async def extract_nodes(


 async def dedupe_extracted_nodes(
-    llm_client: LLMClient,
-    extracted_nodes: list[EntityNode],
-    existing_nodes: list[EntityNode],
+    llm_client: LLMClient,
+    extracted_nodes: list[EntityNode],
+    existing_nodes: list[EntityNode],
 ) -> tuple[list[EntityNode], dict[str, str]]:
     start = time()

@@ -146,9 +146,9 @@ async def dedupe_extracted_nodes(


 async def resolve_extracted_nodes(
-    llm_client: LLMClient,
-    extracted_nodes: list[EntityNode],
-    existing_nodes_lists: list[list[EntityNode]],
+    llm_client: LLMClient,
+    extracted_nodes: list[EntityNode],
+    existing_nodes_lists: list[list[EntityNode]],
 ) -> tuple[list[EntityNode], dict[str, str]]:
     uuid_map: dict[str, str] = {}
     resolved_nodes: list[EntityNode] = []
@@ -169,7 +169,7 @@ async def resolve_extracted_nodes(


 async def resolve_extracted_node(
-    llm_client: LLMClient, extracted_node: EntityNode, existing_nodes: list[EntityNode]
+    llm_client: LLMClient, extracted_node: EntityNode, existing_nodes: list[EntityNode]
 ) -> tuple[EntityNode, dict[str, str]]:
     start = time()

@@ -214,8 +214,8 @@ async def resolve_extracted_node(


 async def dedupe_node_list(
-    llm_client: LLMClient,
-    nodes: list[EntityNode],
+    llm_client: LLMClient,
+    nodes: list[EntityNode],
 ) -> tuple[list[EntityNode], dict[str, str]]:
     start = time()

graphiti_core/utils/maintenance/temporal_operations.py CHANGED
@@ -16,6 +16,7 @@ limitations under the License.

 import logging
 from datetime import datetime
+from time import time
 from typing import List

 from graphiti_core.edges import EntityEdge
@@ -181,3 +182,36 @@ async def extract_edge_dates(
     logger.info(f'Edge date extraction explanation: {explanation}')

     return valid_at_datetime, invalid_at_datetime
+
+
+async def get_edge_contradictions(
+    llm_client: LLMClient, new_edge: EntityEdge, existing_edges: list[EntityEdge]
+) -> list[EntityEdge]:
+    start = time()
+    existing_edge_map = {edge.uuid: edge for edge in existing_edges}
+
+    new_edge_context = {'uuid': new_edge.uuid, 'name': new_edge.name, 'fact': new_edge.fact}
+    existing_edge_context = [
+        {'uuid': existing_edge.uuid, 'name': existing_edge.name, 'fact': existing_edge.fact}
+        for existing_edge in existing_edges
+    ]
+
+    context = {'new_edge': new_edge_context, 'existing_edges': existing_edge_context}
+
+    llm_response = await llm_client.generate_response(prompt_library.invalidate_edges.v2(context))
+
+    contradicted_edge_data = llm_response.get('invalidated_edges', [])
+
+    contradicted_edges: list[EntityEdge] = []
+    for edge_data in contradicted_edge_data:
+        if edge_data['uuid'] in existing_edge_map:
+            contradicted_edge = existing_edge_map[edge_data['uuid']]
+            contradicted_edge.fact = edge_data['fact']
+            contradicted_edges.append(contradicted_edge)
+
+    end = time()
+    logger.info(
+        f'Found invalidated edge candidates from {new_edge.fact}, in {(end - start) * 1000} ms'
+    )
+
+    return contradicted_edges
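A toy run of the lookup step, assuming the LLM answered the v2 prompt with one known and one hallucinated uuid; only uuids that map back to known edges survive, and the fact text is rewritten in place:

    class FakeEdge:
        # Minimal stand-in for EntityEdge with just the fields this step touches.
        def __init__(self, uuid, fact):
            self.uuid, self.fact = uuid, fact

    existing = [FakeEdge('e1', 'Alice works at Initech'), FakeEdge('e2', 'Bob lives in Paris')]
    existing_edge_map = {edge.uuid: edge for edge in existing}

    llm_response = {'invalidated_edges': [
        {'uuid': 'e1', 'fact': 'Alice no longer works at Initech'},
        {'uuid': 'e9', 'fact': 'unknown uuid, dropped'},
    ]}

    contradicted_edges = []
    for edge_data in llm_response.get('invalidated_edges', []):
        if edge_data['uuid'] in existing_edge_map:
            edge = existing_edge_map[edge_data['uuid']]
            edge.fact = edge_data['fact']
            contradicted_edges.append(edge)

    print([(e.uuid, e.fact) for e in contradicted_edges])
    # [('e1', 'Alice no longer works at Initech')]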
graphiti_core-0.2.0.dist-info/METADATA → graphiti_core-0.2.2.dist-info/METADATA RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: graphiti-core
-Version: 0.2.0
+Version: 0.2.2
 Summary: A temporal graph building library
 License: Apache-2.0
 Author: Paul Paliychuk
@@ -173,7 +173,9 @@ graphiti.close()

 ## Documentation

-Visit the Zep knowledge base for Graphiti [Guides and API documentation](https://help.getzep.com/Graphiti/Graphiti).
+- [Guides and API documentation](https://help.getzep.com/graphiti).
+- [Quick Start](https://help.getzep.com/graphiti/graphiti/quick-start)
+- [Building an agent with LangChain's LangGraph and Graphiti](https://help.getzep.com/graphiti/graphiti/lang-graph-agent)

 ## Status and Roadmap

graphiti_core-0.2.0.dist-info/RECORD → graphiti_core-0.2.2.dist-info/RECORD RENAMED
@@ -1,6 +1,6 @@
 graphiti_core/__init__.py,sha256=e5SWFkRiaUwfprYIeIgVIh7JDedNiloZvd3roU-0aDY,55
 graphiti_core/edges.py,sha256=Sxsqw7WZAC6YJKftMaF9t69o7HV_GM6m6ULjtLhZg0M,7484
-graphiti_core/graphiti.py,sha256=BuKFvBA6tqcYKlwGexKQZ0sLNavEvDRzAMo_umlFVcM,23450
+graphiti_core/graphiti.py,sha256=hLIDjvbdvgQPPi1-HVyiQ1gw67jUdiaKqWRBZhtxqFc,23106
 graphiti_core/helpers.py,sha256=EAeC3RrcecjiTGN2vxergN5RHTy2_jhFXA5PQVT3toU,200
 graphiti_core/llm_client/__init__.py,sha256=f4OSk82jJ70wZ2HOuQu6-RQWkkf7HIB0FCT6xOuxZkQ,154
 graphiti_core/llm_client/anthropic_client.py,sha256=C8lOLm7in_eNfOP7s8gjMM0Y99-TzKWlGaPuVGceX68,2180
@@ -9,29 +9,29 @@ graphiti_core/llm_client/config.py,sha256=d1oZ9tt7QBQlbph7v-0HjItb6otK9_-IwF8kkR
 graphiti_core/llm_client/groq_client.py,sha256=qscr5-190wBTUCBL31EAjQTLytK9AF75-y9GsVRvGJU,2206
 graphiti_core/llm_client/openai_client.py,sha256=Bkrp_mKzAxK6kgPzv1UtVUgr1ZvvJhE2H39hgAwWrsI,2211
 graphiti_core/llm_client/utils.py,sha256=H8-Kwa5SyvIYDNIas8O4bHJ6jsOL49li44VoDEMyauY,555
-graphiti_core/nodes.py,sha256=lUSGkWs7EN88qQ1kwwun-t1SWNmTL4z8fOg1dOCqwl0,7879
+graphiti_core/nodes.py,sha256=gB2HxaLHeLAo_wthSI8kRonTdz-BR_GJ4f6JMrxXd0c,8004
 graphiti_core/prompts/__init__.py,sha256=EA-x9xUki9l8wnu2l8ek_oNf75-do5tq5hVq7Zbv8Kw,101
-graphiti_core/prompts/dedupe_edges.py,sha256=FuZQVZlXTYjskaRUYblToLw4cFjyDp4ECrSf-Y8Z4sU,6530
+graphiti_core/prompts/dedupe_edges.py,sha256=DUNHdIudj50FAjkla4nc68tSFSD2yjmYHBw-Bb7ph20,6529
 graphiti_core/prompts/dedupe_nodes.py,sha256=BZ9S-PB9SSGjc5Oo8ivdgA6rZx3OGOFhKtwrBlQ0bm0,7269
 graphiti_core/prompts/extract_edge_dates.py,sha256=G-Gnsyt8pYx9lFJEwlIsTdADF3ESDe26WSsrAGmvlYk,3086
 graphiti_core/prompts/extract_edges.py,sha256=AQ8xYbAv_RKXAT6WMwXs1_GvUdLtM_lhLNbt3SkOAmk,5348
 graphiti_core/prompts/extract_nodes.py,sha256=isYly1Yq9tpD-Dlj2JNvKMdsJUqjWMSO16ZFinFxWic,5304
-graphiti_core/prompts/invalidate_edges.py,sha256=-BJ5j73fDAhRJa1abs35rKYyo-_OSZYTlQNphfo5Kuk,2993
+graphiti_core/prompts/invalidate_edges.py,sha256=8SHt3iPTdmqk8A52LxgdMtI39w4USKqVDMOS2i6lRQ4,4342
 graphiti_core/prompts/lib.py,sha256=RR8f8DQfioUK5bJonMzn02pKLxJlaENv1VocpvRJ488,3532
 graphiti_core/prompts/models.py,sha256=cvx_Bv5RMFUD_5IUawYrbpOKLPHogai7_bm7YXrSz84,867
 graphiti_core/search/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-graphiti_core/search/search.py,sha256=kL3bTnDUwi2-yCWA9RX9JZrbEnk3FVB1RfcEw5naWtY,4414
-graphiti_core/search/search_utils.py,sha256=F1zA_kN3CSwkgKRUohEN0E5H7TWuC6bo879NwprKGpY,15170
+graphiti_core/search/search.py,sha256=IUqAdWub2mg-j9Mz-NacJVLequsG5fxqx2SySKnQtXA,4438
+graphiti_core/search/search_utils.py,sha256=MPzYTp0ybEZjDH92_1Bxwm7dz8CKHkTBcgPWDIXapg0,21135
 graphiti_core/utils/__init__.py,sha256=cJAcMnBZdHBQmWrZdU1PQ1YmaL75bhVUkyVpIPuOyns,260
-graphiti_core/utils/bulk_utils.py,sha256=rArgax8-OpC7MEay0BUzHXZIZKyl3luUiUm3gtrB6kc,11671
+graphiti_core/utils/bulk_utils.py,sha256=xwKgHDNiGDt3-jG_YfN4vrHfG-SUxfuBnsFnBANal98,11683
 graphiti_core/utils/maintenance/__init__.py,sha256=4b9sfxqyFZMLwxxS2lnQ6_wBr3xrJRIqfAWOidK8EK0,388
-graphiti_core/utils/maintenance/edge_operations.py,sha256=Z9t9Rwnpjcc2obcOG6kAxeqrfQCNak54hT8do2RrITs,7201
+graphiti_core/utils/maintenance/edge_operations.py,sha256=JMrMAinkGaGTzaiiCFG-HACOTnoGfJa2hhTQKhujqgM,10782
 graphiti_core/utils/maintenance/graph_data_operations.py,sha256=ggzCWezFyLC29VZBiYHvanOpSRLaPtcmbgHgcl-qHy8,5321
-graphiti_core/utils/maintenance/node_operations.py,sha256=K2O_8Ey2ugBW9DkEUjy4p53qeJ5mbPAEGX7XzN9HF54,8016
-graphiti_core/utils/maintenance/temporal_operations.py,sha256=XIo3xSYQ_4LFiRDBJ-V03l2-rYeIcq6Id0O1o53yWWY,6865
+graphiti_core/utils/maintenance/node_operations.py,sha256=1Iswwoqy7HDH_CQACQUq3oQKrX7cNZb1kdkSQOawj84,7956
+graphiti_core/utils/maintenance/temporal_operations.py,sha256=BzfGDm96w4HcUEsaWTHUBt5S8dNmDQL1eX6AuBL-XFM,8135
 graphiti_core/utils/maintenance/utils.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 graphiti_core/utils/utils.py,sha256=LguHvEDi9JruXKWXXHaz2f4vpezdfgY-rpxjPq0dao8,1959
-graphiti_core-0.2.0.dist-info/LICENSE,sha256=KCUwCyDXuVEgmDWkozHyniRyWjnWUWjkuDHfU6o3JlA,11325
-graphiti_core-0.2.0.dist-info/METADATA,sha256=ce4A0ZTcN36eNCvy0G8BbTy1l5Epr3WqEbIrgaNtOuQ,9040
-graphiti_core-0.2.0.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
-graphiti_core-0.2.0.dist-info/RECORD,,
+graphiti_core-0.2.2.dist-info/LICENSE,sha256=KCUwCyDXuVEgmDWkozHyniRyWjnWUWjkuDHfU6o3JlA,11325
+graphiti_core-0.2.2.dist-info/METADATA,sha256=HOn2oMZZFhh5Tz4v0fNPO45AbEp4muF4QXXFhZOb45o,9184
+graphiti_core-0.2.2.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
+graphiti_core-0.2.2.dist-info/RECORD,,