graphiti-core 0.15.1__py3-none-any.whl → 0.16.0__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as they appear in their public registry, and is provided for informational purposes only.


@@ -45,15 +45,15 @@ logger = logging.getLogger(__name__)
 
 def build_episodic_edges(
     entity_nodes: list[EntityNode],
-    episode: EpisodicNode,
+    episode_uuid: str,
     created_at: datetime,
 ) -> list[EpisodicEdge]:
     episodic_edges: list[EpisodicEdge] = [
         EpisodicEdge(
-            source_node_uuid=episode.uuid,
+            source_node_uuid=episode_uuid,
             target_node_uuid=node.uuid,
             created_at=created_at,
-            group_id=episode.group_id,
+            group_id=node.group_id,
         )
         for node in entity_nodes
     ]
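In 0.16.0, `build_episodic_edges` takes the episode's uuid string instead of the full `EpisodicNode`, and each edge now takes its `group_id` from the entity node rather than from the episode. A minimal caller sketch, assuming the function lives in `graphiti_core.utils.maintenance.edge_operations` (consistent with the RECORD changes below); the `EntityNode` constructor fields shown are assumptions for illustration, not taken from this diff:

```python
from datetime import datetime, timezone

from graphiti_core.nodes import EntityNode
from graphiti_core.utils.maintenance.edge_operations import build_episodic_edges

# Entity nodes extracted from an episode; field names here are assumed.
nodes = [
    EntityNode(name='Alice', group_id='group-1'),
    EntityNode(name='Bob', group_id='group-1'),
]

episodic_edges = build_episodic_edges(
    entity_nodes=nodes,
    episode_uuid='00000000-0000-0000-0000-000000000001',  # was: episode=<EpisodicNode>
    created_at=datetime.now(timezone.utc),
)
# Each episodic edge now inherits group_id from its entity node, not from the episode.
```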
@@ -68,19 +68,23 @@ def build_duplicate_of_edges(
     created_at: datetime,
     duplicate_nodes: list[tuple[EntityNode, EntityNode]],
 ) -> list[EntityEdge]:
-    is_duplicate_of_edges: list[EntityEdge] = [
-        EntityEdge(
-            source_node_uuid=source_node.uuid,
-            target_node_uuid=target_node.uuid,
-            name='IS_DUPLICATE_OF',
-            group_id=episode.group_id,
-            fact=f'{source_node.name} is a duplicate of {target_node.name}',
-            episodes=[episode.uuid],
-            created_at=created_at,
-            valid_at=created_at,
+    is_duplicate_of_edges: list[EntityEdge] = []
+    for source_node, target_node in duplicate_nodes:
+        if source_node.uuid == target_node.uuid:
+            continue
+
+        is_duplicate_of_edges.append(
+            EntityEdge(
+                source_node_uuid=source_node.uuid,
+                target_node_uuid=target_node.uuid,
+                name='IS_DUPLICATE_OF',
+                group_id=episode.group_id,
+                fact=f'{source_node.name} is a duplicate of {target_node.name}',
+                episodes=[episode.uuid],
+                created_at=created_at,
+                valid_at=created_at,
+            )
         )
-        for source_node, target_node in duplicate_nodes
-    ]
 
     return is_duplicate_of_edges
 
@@ -240,50 +244,6 @@ async def extract_edges(
     return edges
 
 
-async def dedupe_extracted_edges(
-    llm_client: LLMClient,
-    extracted_edges: list[EntityEdge],
-    existing_edges: list[EntityEdge],
-) -> list[EntityEdge]:
-    # Create edge map
-    edge_map: dict[str, EntityEdge] = {}
-    for edge in existing_edges:
-        edge_map[edge.uuid] = edge
-
-    # Prepare context for LLM
-    context = {
-        'extracted_edges': [
-            {'uuid': edge.uuid, 'name': edge.name, 'fact': edge.fact} for edge in extracted_edges
-        ],
-        'existing_edges': [
-            {'uuid': edge.uuid, 'name': edge.name, 'fact': edge.fact} for edge in existing_edges
-        ],
-    }
-
-    llm_response = await llm_client.generate_response(prompt_library.dedupe_edges.edge(context))
-    duplicate_data = llm_response.get('duplicates', [])
-    logger.debug(f'Extracted unique edges: {duplicate_data}')
-
-    duplicate_uuid_map: dict[str, str] = {}
-    for duplicate in duplicate_data:
-        uuid_value = duplicate['duplicate_of']
-        duplicate_uuid_map[duplicate['uuid']] = uuid_value
-
-    # Get full edge data
-    edges: list[EntityEdge] = []
-    for edge in extracted_edges:
-        if edge.uuid in duplicate_uuid_map:
-            existing_uuid = duplicate_uuid_map[edge.uuid]
-            existing_edge = edge_map[existing_uuid]
-            # Add current episode to the episodes list
-            existing_edge.episodes += edge.episodes
-            edges.append(existing_edge)
-        else:
-            edges.append(edge)
-
-    return edges
-
-
 async def resolve_extracted_edges(
     clients: GraphitiClients,
     extracted_edges: list[EntityEdge],
@@ -335,7 +295,7 @@ async def resolve_extracted_edges(
         edge_types_lst.append(extracted_edge_types)
 
     # resolve edges with related edges in the graph and find invalidation candidates
-    results: list[tuple[EntityEdge, list[EntityEdge]]] = list(
+    results: list[tuple[EntityEdge, list[EntityEdge], list[EntityEdge]]] = list(
         await semaphore_gather(
             *[
                 resolve_extracted_edge(
@@ -416,9 +376,9 @@ async def resolve_extracted_edge(
     existing_edges: list[EntityEdge],
     episode: EpisodicNode,
     edge_types: dict[str, BaseModel] | None = None,
-) -> tuple[EntityEdge, list[EntityEdge]]:
+) -> tuple[EntityEdge, list[EntityEdge], list[EntityEdge]]:
     if len(related_edges) == 0 and len(existing_edges) == 0:
-        return extracted_edge, []
+        return extracted_edge, [], []
 
     start = time()
 
@@ -457,15 +417,16 @@ async def resolve_extracted_edge(
         model_size=ModelSize.small,
     )
 
-    duplicate_fact_id: int = llm_response.get('duplicate_fact_id', -1)
-
-    resolved_edge = (
-        related_edges[duplicate_fact_id]
-        if 0 <= duplicate_fact_id < len(related_edges)
-        else extracted_edge
+    duplicate_fact_ids: list[int] = list(
+        filter(lambda i: 0 <= i < len(related_edges), llm_response.get('duplicate_facts', []))
     )
 
-    if duplicate_fact_id >= 0 and episode is not None:
+    resolved_edge = extracted_edge
+    for duplicate_fact_id in duplicate_fact_ids:
+        resolved_edge = related_edges[duplicate_fact_id]
+        break
+
+    if duplicate_fact_ids and episode is not None:
         resolved_edge.episodes.append(episode.uuid)
 
     contradicted_facts: list[int] = llm_response.get('contradicted_facts', [])
@@ -519,59 +480,12 @@ async def resolve_extracted_edge(
                 break
 
     # Determine which contradictory edges need to be expired
-    invalidated_edges = resolve_edge_contradictions(resolved_edge, invalidation_candidates)
-
-    return resolved_edge, invalidated_edges
-
-
-async def dedupe_extracted_edge(
-    llm_client: LLMClient,
-    extracted_edge: EntityEdge,
-    related_edges: list[EntityEdge],
-    episode: EpisodicNode | None = None,
-) -> EntityEdge:
-    if len(related_edges) == 0:
-        return extracted_edge
-
-    start = time()
-
-    # Prepare context for LLM
-    related_edges_context = [
-        {'id': edge.uuid, 'fact': edge.fact} for i, edge in enumerate(related_edges)
-    ]
-
-    extracted_edge_context = {
-        'fact': extracted_edge.fact,
-    }
-
-    context = {
-        'related_edges': related_edges_context,
-        'extracted_edges': extracted_edge_context,
-    }
-
-    llm_response = await llm_client.generate_response(
-        prompt_library.dedupe_edges.edge(context),
-        response_model=EdgeDuplicate,
-        model_size=ModelSize.small,
-    )
-
-    duplicate_fact_id: int = llm_response.get('duplicate_fact_id', -1)
-
-    edge = (
-        related_edges[duplicate_fact_id]
-        if 0 <= duplicate_fact_id < len(related_edges)
-        else extracted_edge
-    )
-
-    if duplicate_fact_id >= 0 and episode is not None:
-        edge.episodes.append(episode.uuid)
-
-    end = time()
-    logger.debug(
-        f'Resolved Edge: {extracted_edge.name} is {edge.name}, in {(end - start) * 1000} ms'
+    invalidated_edges: list[EntityEdge] = resolve_edge_contradictions(
+        resolved_edge, invalidation_candidates
     )
+    duplicate_edges: list[EntityEdge] = [related_edges[idx] for idx in duplicate_fact_ids]
 
-    return edge
+    return resolved_edge, invalidated_edges, duplicate_edges
 
 
 async def dedupe_edge_list(
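`resolve_extracted_edge` (and therefore each entry gathered in `resolve_extracted_edges`) now returns a third element: the related edges judged to be duplicates. A hypothetical helper, not part of graphiti-core, sketching how downstream code might unpack the widened tuples:

```python
from graphiti_core.edges import EntityEdge

def split_edge_resolutions(
    results: list[tuple[EntityEdge, list[EntityEdge], list[EntityEdge]]],
) -> tuple[list[EntityEdge], list[EntityEdge], list[EntityEdge]]:
    """Flatten (resolved_edge, invalidated_edges, duplicate_edges) result tuples."""
    resolved: list[EntityEdge] = []
    invalidated: list[EntityEdge] = []
    duplicates: list[EntityEdge] = []
    for resolved_edge, invalidated_edges, duplicate_edges in results:
        resolved.append(resolved_edge)
        invalidated.extend(invalidated_edges)
        duplicates.extend(duplicate_edges)
    return resolved, invalidated, duplicates
```

Callers that previously unpacked two-element tuples from this code path need the extra element even if they ignore it.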
@@ -176,62 +176,13 @@ async def extract_nodes(
     return extracted_nodes
 
 
-async def dedupe_extracted_nodes(
-    llm_client: LLMClient,
-    extracted_nodes: list[EntityNode],
-    existing_nodes: list[EntityNode],
-) -> tuple[list[EntityNode], dict[str, str]]:
-    start = time()
-
-    # build existing node map
-    node_map: dict[str, EntityNode] = {}
-    for node in existing_nodes:
-        node_map[node.uuid] = node
-
-    # Prepare context for LLM
-    existing_nodes_context = [
-        {'uuid': node.uuid, 'name': node.name, 'summary': node.summary} for node in existing_nodes
-    ]
-
-    extracted_nodes_context = [
-        {'uuid': node.uuid, 'name': node.name, 'summary': node.summary} for node in extracted_nodes
-    ]
-
-    context = {
-        'existing_nodes': existing_nodes_context,
-        'extracted_nodes': extracted_nodes_context,
-    }
-
-    llm_response = await llm_client.generate_response(prompt_library.dedupe_nodes.node(context))
-
-    duplicate_data = llm_response.get('duplicates', [])
-
-    end = time()
-    logger.debug(f'Deduplicated nodes: {duplicate_data} in {(end - start) * 1000} ms')
-
-    uuid_map: dict[str, str] = {}
-    for duplicate in duplicate_data:
-        uuid_value = duplicate['duplicate_of']
-        uuid_map[duplicate['uuid']] = uuid_value
-
-    nodes: list[EntityNode] = []
-    for node in extracted_nodes:
-        if node.uuid in uuid_map:
-            existing_uuid = uuid_map[node.uuid]
-            existing_node = node_map[existing_uuid]
-            nodes.append(existing_node)
-        else:
-            nodes.append(node)
-
-    return nodes, uuid_map
-
-
 async def resolve_extracted_nodes(
     clients: GraphitiClients,
     extracted_nodes: list[EntityNode],
     episode: EpisodicNode | None = None,
     previous_episodes: list[EpisodicNode] | None = None,
     entity_types: dict[str, BaseModel] | None = None,
+    existing_nodes_override: list[EntityNode] | None = None,
 ) -> tuple[list[EntityNode], dict[str, str], list[tuple[EntityNode, EntityNode]]]:
     llm_client = clients.llm_client
     driver = clients.driver
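`resolve_extracted_nodes` gains an `existing_nodes_override` parameter that bypasses the internal candidate search and dedupes only against a caller-supplied node list. A hedged sketch of a caller using it; the call shape mirrors the signature above, while the wrapper function itself and the `graphiti_types` import path are assumptions:

```python
from graphiti_core.graphiti_types import GraphitiClients
from graphiti_core.nodes import EntityNode, EpisodicNode
from graphiti_core.utils.maintenance.node_operations import resolve_extracted_nodes

async def resolve_against_known_nodes(
    clients: GraphitiClients,
    extracted_nodes: list[EntityNode],
    known_nodes: list[EntityNode],
    episode: EpisodicNode | None = None,
) -> tuple[list[EntityNode], dict[str, str], list[tuple[EntityNode, EntityNode]]]:
    # existing_nodes_override skips the graph search and dedupes only against known_nodes.
    return await resolve_extracted_nodes(
        clients,
        extracted_nodes,
        episode=episode,
        existing_nodes_override=known_nodes,
    )
```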
@@ -249,9 +200,13 @@ async def resolve_extracted_nodes(
         ]
     )
 
-    existing_nodes_dict: dict[str, EntityNode] = {
-        node.uuid: node for result in search_results for node in result.nodes
-    }
+    candidate_nodes: list[EntityNode] = (
+        [node for result in search_results for node in result.nodes]
+        if existing_nodes_override is None
+        else existing_nodes_override
+    )
+
+    existing_nodes_dict: dict[str, EntityNode] = {node.uuid: node for node in candidate_nodes}
 
     existing_nodes: list[EntityNode] = list(existing_nodes_dict.values())
 
@@ -321,13 +276,11 @@ async def resolve_extracted_nodes(
         resolved_nodes.append(resolved_node)
         uuid_map[extracted_node.uuid] = resolved_node.uuid
 
-        additional_duplicates: list[int] = resolution.get('additional_duplicates', [])
-        for idx in additional_duplicates:
+        duplicates: list[int] = resolution.get('duplicates', [])
+        for idx in duplicates:
             existing_node = existing_nodes[idx] if idx < len(existing_nodes) else resolved_node
-            if existing_node == resolved_node:
-                continue
 
-            node_duplicates.append((resolved_node, existing_nodes[idx]))
+            node_duplicates.append((resolved_node, existing_node))
 
     logger.debug(f'Resolved nodes: {[(n.name, n.uuid) for n in resolved_nodes]}')
 
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: graphiti-core
-Version: 0.15.1
+Version: 0.16.0
 Summary: A temporal graph building library
 Project-URL: Homepage, https://help.getzep.com/graphiti/graphiti/overview
 Project-URL: Repository, https://github.com/getzep/graphiti
@@ -265,6 +265,28 @@ In addition to the Neo4j and OpenAi-compatible credentials, Graphiti also has a
 If you are using one of our supported models, such as Anthropic or Voyage models, the necessary environment variables
 must be set.
 
+### Database Configuration
+
+`DEFAULT_DATABASE` specifies the database name to use for graph operations. This is particularly important for Neo4j 5+ users:
+
+- **Neo4j 5+**: The default database name is `neo4j` (not `default_db`)
+- **Neo4j 4**: The default database name is `default_db`
+- **FalkorDB**: The default graph name is `default_db`
+
+If you encounter the error `Graph not found: default_db` when using Neo4j 5, set:
+
+```bash
+export DEFAULT_DATABASE=neo4j
+```
+
+Or add to your `.env` file:
+
+```
+DEFAULT_DATABASE=neo4j
+```
+
+### Performance Configuration
+
 `USE_PARALLEL_RUNTIME` is an optional boolean variable that can be set to true if you wish
 to enable Neo4j's parallel runtime feature for several of our search queries.
 Note that this feature is not supported for Neo4j Community edition or for smaller AuraDB instances,
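The new `DEFAULT_DATABASE` section added above can also be applied from Python. A hedged sketch, assuming the variable is read from the process environment when graphiti-core initializes its driver (the shell and `.env` examples above are the documented route):

```python
import os

# Neo4j 5+ ships with a database named 'neo4j'; set the variable before the
# library is imported (assumption: graphiti-core reads it via os.environ).
os.environ.setdefault('DEFAULT_DATABASE', 'neo4j')

from graphiti_core import Graphiti  # noqa: E402  (imported after setting the env var)
```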
@@ -2,7 +2,7 @@ graphiti_core/__init__.py,sha256=e5SWFkRiaUwfprYIeIgVIh7JDedNiloZvd3roU-0aDY,55
 graphiti_core/edges.py,sha256=h67vyXYhZYqlwaOmaqjHiGns6nEjuBVSIAFBMveNVo8,16257
 graphiti_core/errors.py,sha256=cH_v9TPgEPeQE6GFOHIg5TvejpUCBddGarMY2Whxbwc,2707
 graphiti_core/graph_queries.py,sha256=KfWDp8xDnPa9bcHskw8NeMpeeHBtZWBCosVdu1Iwv34,7076
-graphiti_core/graphiti.py,sha256=TcZTJJ_4E3qt_bFNZNDy3ujFyjZLMmSYKmixN2kdqqg,33163
+graphiti_core/graphiti.py,sha256=pSgJ7F0vb-avsPnE5ELqdPCs8jSJpG9_L5hrQkEbchg,35257
 graphiti_core/graphiti_types.py,sha256=rL-9bvnLobunJfXU4hkD6mAj14pofKp_wq8QsFDZwDU,1035
 graphiti_core/helpers.py,sha256=ixUOfWN_GJVRvdiK-RzgAYJD18nM1CLmLBDNmVrIboQ,4948
 graphiti_core/nodes.py,sha256=mRK5QaTzYzznAx4OSHKFFOoiyAUKJmXbjWv3ovDJ6z8,18994
@@ -10,7 +10,7 @@ graphiti_core/py.typed,sha256=vlmmzQOt7bmeQl9L3XJP4W6Ry0iiELepnOrinKz5KQg,79
 graphiti_core/cross_encoder/__init__.py,sha256=hry59vz21x-AtGZ0MJ7ugw0HTwJkXiddpp_Yqnwsen0,723
 graphiti_core/cross_encoder/bge_reranker_client.py,sha256=y3TfFxZh0Yvj6HUShmfUm6MC7OPXwWUlv1Qe5HF3S3I,1797
 graphiti_core/cross_encoder/client.py,sha256=KLsbfWKOEaAV3adFe3XZlAeb-gje9_sVKCVZTaJP3ac,1441
-graphiti_core/cross_encoder/gemini_reranker_client.py,sha256=FoOwTqGFQ833X7eAZU11FX7qW4_Fs9iSNyHIc7cslFU,6158
+graphiti_core/cross_encoder/gemini_reranker_client.py,sha256=hmITG5YIib52nrKvINwRi4xTfAO1U4jCCaEVIwImHw0,6208
 graphiti_core/cross_encoder/openai_reranker_client.py,sha256=hoaGyu9nCNMJyP8si0Bha5Q9CFszfiHQmLgE9IsX7sY,4653
 graphiti_core/driver/__init__.py,sha256=kCWimqQU19airu5gKwCmZtZuXkDfaQfKSUhMDoL-rTA,626
 graphiti_core/driver/driver.py,sha256=-FHAA2gM8FA0re-q6udmjQ6pNFdFGRQrMRuAiqX_1A4,1829
@@ -19,7 +19,7 @@ graphiti_core/driver/neo4j_driver.py,sha256=f8cSkcaCDyQLyI85JBprw0rdrarpd5Tq1mlX
 graphiti_core/embedder/__init__.py,sha256=EL564ZuE-DZjcuKNUK_exMn_XHXm2LdO9fzdXePVKL4,179
 graphiti_core/embedder/azure_openai.py,sha256=OyomPwC1fIsddI-3n6g00kQFdQznZorBhHwkQKCLUok,2384
 graphiti_core/embedder/client.py,sha256=qEpSHceL_Gc4QQPJWIOnuNLemNuR_TYA4r28t2Vldbg,1115
-graphiti_core/embedder/gemini.py,sha256=RbJnG-GqzzGamxsGrbjLQUu6ayQ-p-sl1y9wb0SsAik,3580
+graphiti_core/embedder/gemini.py,sha256=0O3JCeeINRNF_jfrEPA-__YHpEHWPkXd7IYfsUMi-ng,4080
 graphiti_core/embedder/openai.py,sha256=bIThUoLMeGlHG2-3VikzK6JZfOHKn4PKvUMx5sHxJy8,2192
 graphiti_core/embedder/voyage.py,sha256=oJHAZiNqjdEJOKgoKfGWcxK2-Ewqn5UB3vrBwIwP2u4,2546
 graphiti_core/llm_client/__init__.py,sha256=QgBWUiCeBp6YiA_xqyrDvJ9jIyy1hngH8g7FWahN3nw,776
@@ -28,7 +28,7 @@ graphiti_core/llm_client/azure_openai_client.py,sha256=ekERggAekbb7enes1RJqdRChf
 graphiti_core/llm_client/client.py,sha256=v_w5TBbDJYYADCXSs2r287g5Ami2Urma-GGEbHSI_Jg,5826
 graphiti_core/llm_client/config.py,sha256=90IgSBxZE_3nWdaEONVLUznI8lytPA7ZyexQz-_c55U,2560
 graphiti_core/llm_client/errors.py,sha256=pn6brRiLW60DAUIXJYKBT6MInrS4ueuH1hNLbn_JbQo,1243
-graphiti_core/llm_client/gemini_client.py,sha256=LLyos2irtidZL3qZ8gGLk23l9JWuHtxRdrcmFHtn0Uw,13235
+graphiti_core/llm_client/gemini_client.py,sha256=oyAOXc2ArPLulayoTRj2fjrKYP107WWs8LqM8574-vA,13434
 graphiti_core/llm_client/groq_client.py,sha256=bYLE_cg1QEhugsJOXh4b1vPbxagKeMWqk48240GCzMs,2922
 graphiti_core/llm_client/openai_base_client.py,sha256=gfMcKPyLrylz_ouRdoenDWXyitmgfFZ17Zthbkq3Qs4,8126
 graphiti_core/llm_client/openai_client.py,sha256=ykBK94gxzE7iXux5rvOzVNA8q0Sqzq-8njPB75XcRe8,3240
@@ -40,8 +40,8 @@ graphiti_core/models/edges/edge_db_queries.py,sha256=4vSWdmE5MKoDrlHJmmr2xNhVSQ-
 graphiti_core/models/nodes/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 graphiti_core/models/nodes/node_db_queries.py,sha256=AQgRGVO-GgFWfLq1G6k8s86WItwpXruy3Mj4DBli-vM,2145
 graphiti_core/prompts/__init__.py,sha256=EA-x9xUki9l8wnu2l8ek_oNf75-do5tq5hVq7Zbv8Kw,101
-graphiti_core/prompts/dedupe_edges.py,sha256=-Fq8YlCPHOEnjJceSOy68dya3VIbmvMtcS8V9u9Tv6g,5699
-graphiti_core/prompts/dedupe_nodes.py,sha256=WdSnqu6O4TkEE_z1u2CEnNH0sWgBNDl4dUx20gSp464,7852
+graphiti_core/prompts/dedupe_edges.py,sha256=_26fV_iLufmt6iWpJ7QduhrNVW3G5dGrx4EbVLuvxNk,5783
+graphiti_core/prompts/dedupe_nodes.py,sha256=GBHSFfkumiQQU8qDnO-kUoSefzDNSzYUpzUl6hPcakc,7740
 graphiti_core/prompts/eval.py,sha256=gnBQTmwsCl3Qvwpcm7aieVszzo6y1sMCUT8jQiKTvvE,5317
 graphiti_core/prompts/extract_edge_dates.py,sha256=3Drs3CmvP0gJN5BidWSxrNvLet3HPoTybU3BUIAoc0Y,4218
 graphiti_core/prompts/extract_edges.py,sha256=4C9sOqdoZCqVZbmkhNCPeNx3R80rzT8eXndySjVRsHc,6622
@@ -61,17 +61,17 @@ graphiti_core/search/search_utils.py,sha256=2g5xL11IUhG2tce2hVwN1bPYWFFxuB3jJh8G
 graphiti_core/telemetry/__init__.py,sha256=5kALLDlU9bb2v19CdN7qVANsJWyfnL9E60J6FFgzm3o,226
 graphiti_core/telemetry/telemetry.py,sha256=47LrzOVBCcZxsYPsnSxWFiztHoxYKKxPwyRX0hnbDGc,3230
 graphiti_core/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-graphiti_core/utils/bulk_utils.py,sha256=YnyXzmOFgqbLdIAIu9Y6aJjUZHhXj8nBnlegkXBTKi8,16344
+graphiti_core/utils/bulk_utils.py,sha256=T8E_b1Jqe5Ipt-bRoF2q5kKJxIF-b6bDydJYnfL8g4M,15416
 graphiti_core/utils/datetime_utils.py,sha256=Ti-2tnrDFRzBsbfblzsHybsM3jaDLP4-VT2t0VhpIzU,1357
 graphiti_core/utils/maintenance/__init__.py,sha256=vW4H1KyapTl-OOz578uZABYcpND4wPx3Vt6aAPaXh78,301
 graphiti_core/utils/maintenance/community_operations.py,sha256=AimQzT7wr4M3ofsUetHa1cPEmhsngqJoNWm3Q-3Hwww,10115
-graphiti_core/utils/maintenance/edge_operations.py,sha256=sj4AJ9zPm8ACiC1wSj99bFUUmg4OgFVFnPOSXKfb3T8,21578
+graphiti_core/utils/maintenance/edge_operations.py,sha256=RQ9wtxky4JXUtRzc0KUznY03Ebi1-y9Zn_BJztbSD4o,19219
 graphiti_core/utils/maintenance/graph_data_operations.py,sha256=NH1FLwVKqnDdt2JKa38g_y2lG08ID5cAR-GPTQccxC4,5403
-graphiti_core/utils/maintenance/node_operations.py,sha256=0WdH_VrkVXLV9YX3xPErXOFygOo2N9g3es9yIB2Yl8Q,15876
+graphiti_core/utils/maintenance/node_operations.py,sha256=4jMlmbB3zwK9KzIm2QXRxzA4HAn-SJiNhWMeCacwHh8,14467
 graphiti_core/utils/maintenance/temporal_operations.py,sha256=mJkw9xLB4W2BsLfC5POr0r-PHWL9SIfNj_l_xu0B5ug,3410
 graphiti_core/utils/maintenance/utils.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 graphiti_core/utils/ontology_utils/entity_types_utils.py,sha256=QJX5cG0GSSNF_Mm_yrldr69wjVAbN_MxLhOSznz85Hk,1279
-graphiti_core-0.15.1.dist-info/METADATA,sha256=KGLbx0MsKX6M0lw_26eCbKdy_yYHm8XyaFMKB3QDf7Q,22414
-graphiti_core-0.15.1.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
-graphiti_core-0.15.1.dist-info/licenses/LICENSE,sha256=KCUwCyDXuVEgmDWkozHyniRyWjnWUWjkuDHfU6o3JlA,11325
-graphiti_core-0.15.1.dist-info/RECORD,,
+graphiti_core-0.16.0.dist-info/METADATA,sha256=DE6cwPaJL6L0jsmH2Vyeu7fK_vELK7HlksmcP9BoKIU,22973
+graphiti_core-0.16.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+graphiti_core-0.16.0.dist-info/licenses/LICENSE,sha256=KCUwCyDXuVEgmDWkozHyniRyWjnWUWjkuDHfU6o3JlA,11325
+graphiti_core-0.16.0.dist-info/RECORD,,