graphiti-core 0.8.0__py3-none-any.whl → 0.8.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

This version of graphiti-core has been flagged as potentially problematic.

@@ -0,0 +1,20 @@
+ """
+ Copyright 2025, Zep Software, Inc.
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+ """
+
+ from .client import CrossEncoderClient
+ from .openai_reranker_client import OpenAIRerankerClient
+
+ __all__ = ['CrossEncoderClient', 'OpenAIRerankerClient']
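
With this new subpackage `__init__.py`, both classes can be imported directly from `graphiti_core.cross_encoder`. A minimal sketch of what that enables:

```python
from graphiti_core.cross_encoder import CrossEncoderClient, OpenAIRerankerClient

# OpenAIRerankerClient implements the CrossEncoderClient interface (see the hunks below).
assert issubclass(OpenAIRerankerClient, CrossEncoderClient)
```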
@@ -18,7 +18,7 @@ import logging
  from typing import Any
 
  import openai
- from openai import AsyncOpenAI
+ from openai import AsyncAzureOpenAI, AsyncOpenAI
  from pydantic import BaseModel
 
  from ..helpers import semaphore_gather
@@ -36,21 +36,29 @@ class BooleanClassifier(BaseModel):
 
 
  class OpenAIRerankerClient(CrossEncoderClient):
- def __init__(self, config: LLMConfig | None = None):
+ def __init__(
+     self,
+     config: LLMConfig | None = None,
+     client: AsyncOpenAI | AsyncAzureOpenAI | None = None,
+ ):
  """
- Initialize the OpenAIClient with the provided configuration, cache setting, and client.
+ Initialize the OpenAIRerankerClient with the provided configuration and client.
+
+ This reranker uses the OpenAI API to run a simple boolean classifier prompt concurrently
+ for each passage. Log-probabilities are used to rank the passages.
 
  Args:
  config (LLMConfig | None): The configuration for the LLM client, including API key, model, base URL, temperature, and max tokens.
- cache (bool): Whether to use caching for responses. Defaults to False.
- client (Any | None): An optional async client instance to use. If not provided, a new AsyncOpenAI client is created.
-
+ client (AsyncOpenAI | AsyncAzureOpenAI | None): An optional async client instance to use. If not provided, a new AsyncOpenAI client is created.
  """
  if config is None:
  config = LLMConfig()
 
  self.config = config
- self.client = AsyncOpenAI(api_key=config.api_key, base_url=config.base_url)
+ if client is None:
+     self.client = AsyncOpenAI(api_key=config.api_key, base_url=config.base_url)
+ else:
+     self.client = client
 
  async def rank(self, query: str, passages: list[str]) -> list[tuple[str, float]]:
  openai_messages_list: Any = [
@@ -62,7 +70,7 @@ class OpenAIRerankerClient(CrossEncoderClient):
  Message(
  role='user',
  content=f"""
- Respond with "True" if PASSAGE is relevant to QUERY and "False" otherwise.
+ Respond with "True" if PASSAGE is relevant to QUERY and "False" otherwise.
  <PASSAGE>
  {passage}
  </PASSAGE>
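
The new docstring describes reranking by running a boolean classifier prompt per passage and using log-probabilities as scores. A hedged sketch of that scoring idea (illustrative only, not graphiti-core's actual implementation; `passage_score` and `rank_passages` are hypothetical helpers):

```python
import math

def passage_score(token: str, logprob: float) -> float:
    # Convert a "True"/"False" judgement and its log-probability into a relevance score:
    # a confident "True" approaches 1.0, a confident "False" approaches 0.0.
    p = math.exp(logprob)
    return p if token.strip().lower().startswith('true') else 1.0 - p

def rank_passages(passages: list[str], judgements: list[tuple[str, float]]) -> list[tuple[str, float]]:
    # judgements[i] is the (sampled_token, logprob) pair returned for passages[i].
    scored = [(p, passage_score(tok, lp)) for p, (tok, lp) in zip(passages, judgements)]
    return sorted(scored, key=lambda pair: pair[1], reverse=True)
```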
@@ -15,8 +15,9 @@ limitations under the License.
  """
 
  from collections.abc import Iterable
+ from typing import Union
 
- from openai import AsyncOpenAI
+ from openai import AsyncAzureOpenAI, AsyncOpenAI
  from openai.types import EmbeddingModel
 
  from .client import EmbedderClient, EmbedderConfig
@@ -33,13 +34,23 @@ class OpenAIEmbedderConfig(EmbedderConfig):
  class OpenAIEmbedder(EmbedderClient):
  """
  OpenAI Embedder Client
+
+ This client supports both AsyncOpenAI and AsyncAzureOpenAI clients.
  """
 
- def __init__(self, config: OpenAIEmbedderConfig | None = None):
+ def __init__(
+     self,
+     config: OpenAIEmbedderConfig | None = None,
+     client: Union[AsyncOpenAI, AsyncAzureOpenAI, None] = None,
+ ):
  if config is None:
  config = OpenAIEmbedderConfig()
  self.config = config
- self.client = AsyncOpenAI(api_key=config.api_key, base_url=config.base_url)
+
+ if client is not None:
+     self.client = client
+ else:
+     self.client = AsyncOpenAI(api_key=config.api_key, base_url=config.base_url)
 
  async def create(
  self, input_data: str | list[str] | Iterable[int] | Iterable[Iterable[int]]
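
A hedged usage sketch of the new `client` parameter: pass a preconfigured client (for example an `AsyncAzureOpenAI` instance, as in the Azure OpenAI section of the README further below) instead of letting the embedder construct its own `AsyncOpenAI`. The credential values here are placeholders.

```python
from openai import AsyncAzureOpenAI
from graphiti_core.embedder.openai import OpenAIEmbedder, OpenAIEmbedderConfig

# Placeholders: substitute your own Azure OpenAI credentials and deployment.
azure_client = AsyncAzureOpenAI(
    api_key="<your-api-key>",
    api_version="<your-api-version>",
    azure_endpoint="<your-azure-endpoint>",
)

embedder = OpenAIEmbedder(
    config=OpenAIEmbedderConfig(embedding_model="text-embedding-3-small"),
    client=azure_client,
)
```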
@@ -30,10 +30,10 @@ EPISODIC_NODE_SAVE_BULK = """
  """
 
  ENTITY_NODE_SAVE = """
- MERGE (n:Entity {uuid: $uuid})
+ MERGE (n:Entity {uuid: $entity_data.uuid})
  SET n:$($labels)
  SET n = $entity_data
- WITH n CALL db.create.setNodeVectorProperty(n, "name_embedding", $name_embedding)
+ WITH n CALL db.create.setNodeVectorProperty(n, "name_embedding", $entity_data.name_embedding)
  RETURN n.uuid AS uuid"""
 
  ENTITY_NODE_SAVE_BULK = """
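
The rewritten `ENTITY_NODE_SAVE` reads `uuid` and `name_embedding` out of the `$entity_data` map instead of taking them as separate parameters. A hedged sketch of how such a query might be executed with the Neo4j async Python driver (illustrative only; `save_entity` is a hypothetical helper, not graphiti-core's internal code):

```python
from neo4j import AsyncDriver

async def save_entity(driver: AsyncDriver, query: str, labels: list[str], entity_data: dict) -> str:
    # entity_data carries uuid and name_embedding itself, so only $labels and
    # $entity_data are passed as query parameters.
    records, _, _ = await driver.execute_query(query, labels=labels, entity_data=entity_data)
    return records[0]["uuid"]
```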
@@ -189,6 +189,9 @@ async def extract_nodes(
  new_nodes = []
  for name in extracted_node_names:
  entity_type = node_classifications.get(name)
+ if entity_types is not None and entity_type not in entity_types:
+     entity_type = None
+
  labels = (
  ['Entity']
  if entity_type is None or entity_type == 'None' or entity_type == 'null'
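
The added guard discards classifications that are not among the caller-supplied `entity_types`, so an unexpected label from the LLM falls back to the generic `Entity` label. A hedged, self-contained illustration of that fallback (sample data is made up):

```python
# Illustrative values only; in graphiti-core these come from the extraction step.
entity_types = {'Person': None, 'Company': None}          # caller-defined custom types
node_classifications = {'Kendra': 'Person', 'Adidas shoes': 'Footwear'}

for name in node_classifications:
    entity_type = node_classifications.get(name)
    if entity_types is not None and entity_type not in entity_types:
        entity_type = None                                 # unknown type -> drop it
    labels = ['Entity'] if entity_type is None else ['Entity', entity_type]
    print(name, labels)
    # Kendra -> ['Entity', 'Person']; Adidas shoes -> ['Entity']
```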
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: graphiti-core
- Version: 0.8.0
+ Version: 0.8.2
  Summary: A temporal graph building library
  License: Apache-2.0
  Author: Paul Paliychuk
@@ -29,7 +29,7 @@ Description-Content-Type: text/markdown
  <h1 align="center">
  Graphiti
  </h1>
- <h2 align="center"> Temporal Knowledge Graphs for Agentic Applications</h2>
+ <h2 align="center"> Build Real-Time Knowledge Graphs for AI Agents</h2>
  <br />
 
  [![Discord](https://dcbadge.vercel.app/api/server/W8Kw6bsgXQ?style=flat)](https://discord.com/invite/W8Kw6bsgXQ)
@@ -38,12 +38,16 @@ Graphiti
  [![MyPy Check](https://github.com/getzep/Graphiti/actions/workflows/typecheck.yml/badge.svg)](https://github.com/getzep/Graphiti/actions/workflows/typecheck.yml)
  [![Open in GitHub Codespaces](https://github.com/codespaces/badge.svg)](https://codespaces.new/getzep/Graphiti)
 
- :star: *Help us reach more developers and grow the Graphiti community. Star this repo!*
+ :star: _Help us reach more developers and grow the Graphiti community. Star this repo!_
  <br />
 
- Graphiti builds dynamic, temporally aware Knowledge Graphs that represent complex, evolving relationships between
- entities over time. Graphiti ingests both unstructured and structured data, and the resulting graph may be queried using
- a fusion of time, full-text, semantic, and graph algorithm approaches.
+ Graphiti is a framework for building and querying temporally-aware knowledge graphs, specifically tailored for AI agents operating in dynamic environments. Unlike traditional retrieval-augmented generation (RAG) methods, Graphiti continuously integrates user interactions, structured and unstructured enterprise data, and external information into a coherent, queryable graph. The framework supports incremental data updates, efficient retrieval, and precise historical queries without requiring complete graph recomputation, making it suitable for developing interactive, context-aware AI applications.
+
+ Use Graphiti to:
+
+ - Integrate and maintain dynamic user interactions and business data.
+ - Facilitate state-based reasoning and task automation for agents.
+ - Query complex, evolving data with semantic, keyword, and graph-based search methods.
 
  <br />
 
@@ -53,31 +57,21 @@ a fusion of time, full-text, semantic, and graph algorithm approaches.
 
  <br />
 
- Graphiti helps you create and query Knowledge Graphs that evolve over time. A knowledge graph is a network of
- interconnected facts, such as _“Kendra loves Adidas shoes.”_ Each fact is a “triplet” represented by two entities, or
+ A knowledge graph is a network of interconnected facts, such as _“Kendra loves Adidas shoes.”_ Each fact is a “triplet” represented by two entities, or
  nodes (_”Kendra”_, _“Adidas shoes”_), and their relationship, or edge (_”loves”_). Knowledge Graphs have been explored
  extensively for information retrieval. What makes Graphiti unique is its ability to autonomously build a knowledge graph
  while handling changing relationships and maintaining historical context.
 
- With Graphiti, you can build LLM applications such as:
-
- - Assistants that learn from user interactions, fusing personal knowledge with dynamic data from business systems like
- CRMs and billing platforms.
- - Agents that autonomously execute complex tasks, reasoning with state changes from multiple dynamic sources.
-
- Graphiti supports a wide range of applications in sales, customer service, health, finance, and more, enabling long-term
- recall and state-based reasoning for both assistants and agents.
-
  ## Graphiti and Zep Memory
 
- Graphiti powers the core of [Zep's memory layer](https://www.getzep.com) for LLM-powered Assistants and Agents.
+ Graphiti powers the core of [Zep's memory layer](https://www.getzep.com) for AI Agents.
 
  Using Graphiti, we've demonstrated Zep is
  the [State of the Art in Agent Memory](https://blog.getzep.com/state-of-the-art-agent-memory/).
 
  Read our paper: [Zep: A Temporal Knowledge Graph Architecture for Agent Memory](https://arxiv.org/abs/2501.13956).
 
- We're excited to open-source Graphiti, believing its potential reaches far beyond memory applications.
+ We're excited to open-source Graphiti, believing its potential reaches far beyond AI memory applications.
 
  <p align="center">
  <a href="https://arxiv.org/abs/2501.13956"><img src="images/arxiv-screenshot.png" alt="Zep: A Temporal Knowledge Graph Architecture for Agent Memory" width="700px"></a>
@@ -85,34 +79,41 @@ We're excited to open-source Graphiti, believing its potential reaches far beyon
 
  ## Why Graphiti?
 
- We were intrigued by Microsoft’s GraphRAG, which expanded on RAG text chunking by using a graph to better model a
- document corpus and making this representation available via semantic and graph search techniques. However, GraphRAG did
- not address our core problem: It's primarily designed for static documents and doesn't inherently handle temporal
- aspects of data.
-
- Graphiti is designed from the ground up to handle constantly changing information, hybrid semantic and graph search, and
- scale:
+ Traditional RAG approaches often rely on batch processing and static data summarization, making them inefficient for frequently changing data. Graphiti addresses these challenges by providing:
 
- - **Temporal Awareness:** Tracks changes in facts and relationships over time, enabling point-in-time queries. Graph
- edges include temporal metadata to record relationship lifecycles.
- - **Episodic Processing:** Ingests data as discrete episodes, maintaining data provenance and allowing incremental
- entity and relationship extraction.
- - **Hybrid Search:** Combines semantic and BM25 full-text search, with the ability to rerank results by distance from a
- central node e.g. “Kendra”.
- - **Scalable:** Designed for processing large datasets, with parallelization of LLM calls for bulk processing while
- preserving the chronology of events.
- - **Supports Varied Sources:** Can ingest both unstructured text and structured JSON data.
+ - **Real-Time Incremental Updates:** Immediate integration of new data episodes without batch recomputation.
+ - **Bi-Temporal Data Model:** Explicit tracking of event occurrence and ingestion times, allowing accurate point-in-time queries.
+ - **Efficient Hybrid Retrieval:** Combines semantic embeddings, keyword (BM25), and graph traversal to achieve low-latency queries without reliance on LLM summarization.
+ - **Custom Entity Definitions:** Flexible ontology creation and support for developer-defined entities through straightforward Pydantic models.
+ - **Scalability:** Efficiently manages large datasets with parallel processing, suitable for enterprise environments.
 
  <p align="center">
  <img src="/images/graphiti-intro-slides-stock-2.gif" alt="Graphiti structured + unstructured demo" width="700px">
  </p>
 
+ ## Graphiti vs. GraphRAG
+
+ | Aspect                     | GraphRAG                               | Graphiti                                          |
+ | -------------------------- | -------------------------------------- | ------------------------------------------------- |
+ | **Primary Use**            | Static document summarization          | Dynamic data management                            |
+ | **Data Handling**          | Batch-oriented processing              | Continuous, incremental updates                    |
+ | **Knowledge Structure**    | Entity clusters & community summaries  | Episodic data, semantic entities, communities      |
+ | **Retrieval Method**       | Sequential LLM summarization           | Hybrid semantic, keyword, and graph-based search   |
+ | **Adaptability**           | Low                                    | High                                               |
+ | **Temporal Handling**      | Basic timestamp tracking               | Explicit bi-temporal tracking                      |
+ | **Contradiction Handling** | LLM-driven summarization judgments     | Temporal edge invalidation                         |
+ | **Query Latency**          | Seconds to tens of seconds             | Typically sub-second latency                       |
+ | **Custom Entity Types**    | No                                     | Yes, customizable                                  |
+ | **Scalability**            | Moderate                               | High, optimized for large datasets                 |
+
+ Graphiti is specifically designed to address the challenges of dynamic and frequently updated datasets, making it particularly suitable for applications requiring real-time interaction and precise historical queries.
+
  ## Installation
 
  Requirements:
 
  - Python 3.10 or higher
- - Neo4j 5.26 or higher
+ - Neo4j 5.26 or higher (serves as the embeddings storage backend)
  - OpenAI API key (for LLM inference and embedding)
 
  Optional:
@@ -145,7 +146,7 @@ from graphiti_core import Graphiti
  from graphiti_core.nodes import EpisodeType
  from datetime import datetime, timezone
 
- # Initialize Graphiti
+ # Initialize Graphiti as Your Memory Layer
  graphiti = Graphiti("bolt://localhost:7687", "neo4j", "password")
 
  # Initialize the graph database with Graphiti's indices. This only needs to be done once.
@@ -166,7 +167,7 @@ for i, episode in enumerate(episodes):
  reference_time=datetime.now(timezone.utc)
  )
 
- # Search the graph
+ # Search the graph for semantic memory retrieval
  # Execute a hybrid search combining semantic similarity and BM25 retrieval
  # Results are combined and reranked using Reciprocal Rank Fusion
  results = await graphiti.search('Who was the California Attorney General?')
@@ -198,11 +199,12 @@ results = await graphiti.search('Who was the California Attorney General?')
  # Results are weighted by their proximity, with distant edges receiving lower scores.
  await graphiti.search('Who was the California Attorney General?', center_node_uuid)
 
- # Close the connection
+ # Close the connection when chat state management is complete
  graphiti.close()
  ```
 
  ## Graph Service
+
  The `server` directory contains an API service for interacting with the Graphiti API. It is built using FastAPI.
 
  Please see the [server README](./server/README.md) for more information.
@@ -218,6 +220,54 @@ to enable Neo4j's parallel runtime feature for several of our search queries.
  Note that this feature is not supported for Neo4j Community edition or for smaller AuraDB instances,
  as such this feature is off by default.
 
+ ## Using Graphiti with Azure OpenAI
+
+ Graphiti supports Azure OpenAI for both LLM inference and embeddings. To use Azure OpenAI, you'll need to configure both the LLM client and embedder with your Azure OpenAI credentials.
+
+ ```python
+ from openai import AsyncAzureOpenAI
+ from graphiti_core import Graphiti
+ from graphiti_core.llm_client import OpenAIClient
+ from graphiti_core.embedder.openai import OpenAIEmbedder, OpenAIEmbedderConfig
+ from graphiti_core.cross_encoder.openai_reranker_client import OpenAIRerankerClient
+
+ # Azure OpenAI configuration
+ api_key = "<your-api-key>"
+ api_version = "<your-api-version>"
+ azure_endpoint = "<your-azure-endpoint>"
+
+ # Create Azure OpenAI client for LLM
+ azure_openai_client = AsyncAzureOpenAI(
+     api_key=api_key,
+     api_version=api_version,
+     azure_endpoint=azure_endpoint
+ )
+
+ # Initialize Graphiti with Azure OpenAI clients
+ graphiti = Graphiti(
+     "bolt://localhost:7687",
+     "neo4j",
+     "password",
+     llm_client=OpenAIClient(
+         client=azure_openai_client
+     ),
+     embedder=OpenAIEmbedder(
+         config=OpenAIEmbedderConfig(
+             embedding_model="text-embedding-3-small"  # Use your Azure deployed embedding model name
+         ),
+         client=azure_openai_client
+     ),
+     # Optional: Configure the OpenAI cross encoder with Azure OpenAI
+     cross_encoder=OpenAIRerankerClient(
+         client=azure_openai_client
+     )
+ )
+
+ # Now you can use Graphiti with Azure OpenAI
+ ```
+
+ Make sure to replace the placeholder values with your actual Azure OpenAI credentials and specify the correct embedding model name that's deployed in your Azure OpenAI service.
+
  ## Documentation
 
  - [Guides and API documentation](https://help.getzep.com/graphiti).
@@ -1,12 +1,12 @@
  graphiti_core/__init__.py,sha256=e5SWFkRiaUwfprYIeIgVIh7JDedNiloZvd3roU-0aDY,55
- graphiti_core/cross_encoder/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ graphiti_core/cross_encoder/__init__.py,sha256=hry59vz21x-AtGZ0MJ7ugw0HTwJkXiddpp_Yqnwsen0,723
  graphiti_core/cross_encoder/bge_reranker_client.py,sha256=xgXZqB_qoaWQPjnmuf1ne38YPyhhvApySKcQDaHc9R4,1435
  graphiti_core/cross_encoder/client.py,sha256=KLsbfWKOEaAV3adFe3XZlAeb-gje9_sVKCVZTaJP3ac,1441
- graphiti_core/cross_encoder/openai_reranker_client.py,sha256=DECh36QdkM4mE5qgg158ss6G4si2DjgOTqrzGOz5C9s,4089
+ graphiti_core/cross_encoder/openai_reranker_client.py,sha256=e-QCftckZ7GLQgJ-ijljw6vzVa6H417cZ3cISB70M5g,4377
  graphiti_core/edges.py,sha256=WXJcJp4fhdlgL4sAsh3YBqkMRAvHKmmaC_7uCPLzePI,15680
  graphiti_core/embedder/__init__.py,sha256=eWd-0sPxflnYXLoWNT9sxwCIFun5JNO9Fk4E-ZXXf8Y,164
  graphiti_core/embedder/client.py,sha256=HKIlpPLnzFT81jurPkry6z8F8nxfZVfejdcfxHVUSFU,995
- graphiti_core/embedder/openai.py,sha256=FzEM9rtSDK1wTb4iYKjNjjdFf8BEBTDxG2vM_E-5W-8,1621
+ graphiti_core/embedder/openai.py,sha256=23BnPA10eiaa1HkxHKYSj75-0PymczPK2FNNIz8Txbc,1910
  graphiti_core/embedder/voyage.py,sha256=7kqrLG75J3Q6cdA2Nlx1JSYtpk2141ckdl3OtDDw0vU,1882
  graphiti_core/errors.py,sha256=ddHrHGQxhwkVAtSph4AV84UoOlgwZufMczXPwB7uqPo,1795
  graphiti_core/graphiti.py,sha256=DP2hd1aXIYh-nXVfxugHvw7Tbax50w83IKMZf9Z5BeI,29128
@@ -24,7 +24,7 @@ graphiti_core/models/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hS
  graphiti_core/models/edges/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  graphiti_core/models/edges/edge_db_queries.py,sha256=2UoLkmazO-FJYqjc3g0LuL-pyjekzQxxed_XHVv_HZE,2671
  graphiti_core/models/nodes/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- graphiti_core/models/nodes/node_db_queries.py,sha256=f4_UT6XL8UDt4_CO9YIHeI8pvpw_vrutA9SYrgi6QCU,2121
+ graphiti_core/models/nodes/node_db_queries.py,sha256=AQgRGVO-GgFWfLq1G6k8s86WItwpXruy3Mj4DBli-vM,2145
  graphiti_core/nodes.py,sha256=0lH8SOpnzTtNIvG4ScnJ3SeQudviCsZwsnM867kY1aI,16998
  graphiti_core/prompts/__init__.py,sha256=EA-x9xUki9l8wnu2l8ek_oNf75-do5tq5hVq7Zbv8Kw,101
  graphiti_core/prompts/dedupe_edges.py,sha256=EuX8ngeItBzrlMBOgeHrpExzxIFHD2aoDyaX1ZniF6I,3556
@@ -52,10 +52,10 @@ graphiti_core/utils/maintenance/__init__.py,sha256=TRY3wWWu5kn3Oahk_KKhltrWnh0NA
  graphiti_core/utils/maintenance/community_operations.py,sha256=gIw1M5HGgc2c3TXag5ygPPpAv5WsG-yoC8Lhmfr6FMs,10011
  graphiti_core/utils/maintenance/edge_operations.py,sha256=tNw56vN586JYZMgie6RLRTiHZ680-kWzDIxW8ucL6SU,12780
  graphiti_core/utils/maintenance/graph_data_operations.py,sha256=qds9ALk9PhpQs1CNZTZGpi70mqJ93Y2KhIh9X2r8MUI,6533
- graphiti_core/utils/maintenance/node_operations.py,sha256=UGCRjnqVNDjDxd7dIhwC16w0tcrthotlW0gDpLrVuLE,15118
+ graphiti_core/utils/maintenance/node_operations.py,sha256=H0DAL2Qau4weIcEIrVVSdntVQtN_y4Hzoik6GldP1XA,15223
  graphiti_core/utils/maintenance/temporal_operations.py,sha256=RdNtubCyYhOVrvcOIq2WppHls1Q-BEjtsN8r38l-Rtc,3691
  graphiti_core/utils/maintenance/utils.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- graphiti_core-0.8.0.dist-info/LICENSE,sha256=KCUwCyDXuVEgmDWkozHyniRyWjnWUWjkuDHfU6o3JlA,11325
- graphiti_core-0.8.0.dist-info/METADATA,sha256=T5__jpHmuwYi4SuqjbjPYr2JRC4M24u8Ssrf2dRKuuI,10541
- graphiti_core-0.8.0.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
- graphiti_core-0.8.0.dist-info/RECORD,,
+ graphiti_core-0.8.2.dist-info/LICENSE,sha256=KCUwCyDXuVEgmDWkozHyniRyWjnWUWjkuDHfU6o3JlA,11325
+ graphiti_core-0.8.2.dist-info/METADATA,sha256=NtjJHZn-nXyZb4ojlhL5ZLPmlWn3kk7t6j5xdv9KQPE,13585
+ graphiti_core-0.8.2.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
+ graphiti_core-0.8.2.dist-info/RECORD,,