graphiti-core 0.7.9__py3-none-any.whl → 0.8.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

This version of graphiti-core has been flagged as potentially problematic.

graphiti_core/cross_encoder/__init__.py CHANGED
@@ -0,0 +1,20 @@
+ """
+ Copyright 2025, Zep Software, Inc.
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+ """
+
+ from .client import CrossEncoderClient
+ from .openai_reranker_client import OpenAIRerankerClient
+
+ __all__ = ['CrossEncoderClient', 'OpenAIRerankerClient']
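
The new `__init__.py` re-exports the cross-encoder classes at the package level, so both names can be imported directly from `graphiti_core.cross_encoder`:

```python
# Both names are re-exported via the new __all__ shown above.
from graphiti_core.cross_encoder import CrossEncoderClient, OpenAIRerankerClient
```
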
graphiti_core/cross_encoder/openai_reranker_client.py CHANGED
@@ -18,7 +18,7 @@ import logging
  from typing import Any

  import openai
- from openai import AsyncOpenAI
+ from openai import AsyncAzureOpenAI, AsyncOpenAI
  from pydantic import BaseModel

  from ..helpers import semaphore_gather
@@ -36,21 +36,29 @@ class BooleanClassifier(BaseModel):


  class OpenAIRerankerClient(CrossEncoderClient):
-     def __init__(self, config: LLMConfig | None = None):
+     def __init__(
+         self,
+         config: LLMConfig | None = None,
+         client: AsyncOpenAI | AsyncAzureOpenAI | None = None,
+     ):
          """
-         Initialize the OpenAIClient with the provided configuration, cache setting, and client.
+         Initialize the OpenAIRerankerClient with the provided configuration and client.
+
+         This reranker uses the OpenAI API to run a simple boolean classifier prompt concurrently
+         for each passage. Log-probabilities are used to rank the passages.

          Args:
              config (LLMConfig | None): The configuration for the LLM client, including API key, model, base URL, temperature, and max tokens.
-             cache (bool): Whether to use caching for responses. Defaults to False.
-             client (Any | None): An optional async client instance to use. If not provided, a new AsyncOpenAI client is created.
-
+             client (AsyncOpenAI | AsyncAzureOpenAI | None): An optional async client instance to use. If not provided, a new AsyncOpenAI client is created.
          """
          if config is None:
              config = LLMConfig()

          self.config = config
-         self.client = AsyncOpenAI(api_key=config.api_key, base_url=config.base_url)
+         if client is None:
+             self.client = AsyncOpenAI(api_key=config.api_key, base_url=config.base_url)
+         else:
+             self.client = client

      async def rank(self, query: str, passages: list[str]) -> list[tuple[str, float]]:
          openai_messages_list: Any = [
@@ -62,7 +70,7 @@ class OpenAIRerankerClient(CrossEncoderClient):
              Message(
                  role='user',
                  content=f"""
-                 Respond with "True" if PASSAGE is relevant to QUERY and "False" otherwise.
+                 Respond with "True" if PASSAGE is relevant to QUERY and "False" otherwise.
                  <PASSAGE>
                  {passage}
                  </PASSAGE>
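
With the widened constructor, a pre-configured client can be injected instead of letting the reranker build its own. A minimal sketch, using placeholder Azure credentials (the `AsyncAzureOpenAI` arguments mirror the Azure example in the README portion of this diff):

```python
from openai import AsyncAzureOpenAI

from graphiti_core.cross_encoder import OpenAIRerankerClient

# Placeholder Azure OpenAI deployment details -- substitute your own values.
azure_openai_client = AsyncAzureOpenAI(
    api_key="<your-api-key>",
    api_version="<your-api-version>",
    azure_endpoint="<your-azure-endpoint>",
)

# When `client` is omitted, the reranker still builds an AsyncOpenAI from LLMConfig as before.
reranker = OpenAIRerankerClient(client=azure_openai_client)
```
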
graphiti_core/edges.py CHANGED
@@ -337,6 +337,32 @@ class EntityEdge(Edge):
              raise GroupsEdgesNotFoundError(group_ids)
          return edges

+     @classmethod
+     async def get_by_node_uuid(cls, driver: AsyncDriver, node_uuid: str):
+         query: LiteralString = """
+             MATCH (n:Entity {uuid: $node_uuid})-[e:RELATES_TO]-(m:Entity)
+             RETURN DISTINCT
+                 e.uuid AS uuid,
+                 n.uuid AS source_node_uuid,
+                 m.uuid AS target_node_uuid,
+                 e.created_at AS created_at,
+                 e.name AS name,
+                 e.group_id AS group_id,
+                 e.fact AS fact,
+                 e.fact_embedding AS fact_embedding,
+                 e.episodes AS episodes,
+                 e.expired_at AS expired_at,
+                 e.valid_at AS valid_at,
+                 e.invalid_at AS invalid_at
+         """
+         records, _, _ = await driver.execute_query(
+             query, node_uuid=node_uuid, database_=DEFAULT_DATABASE, routing_='r'
+         )
+
+         edges = [get_entity_edge_from_record(record) for record in records]
+
+         return edges
+

  class CommunityEdge(Edge):
      async def save(self, driver: AsyncDriver):
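
A quick sketch of calling the new classmethod; here `driver` is assumed to be the same Neo4j `AsyncDriver` a `Graphiti` instance holds (e.g. `graphiti.driver`), and the UUID is a placeholder:

```python
from graphiti_core.edges import EntityEdge

# Fetch every RELATES_TO edge touching a single Entity node, in either direction.
edges = await EntityEdge.get_by_node_uuid(driver, "<entity-node-uuid>")
for edge in edges:
    print(edge.name, edge.fact)
```
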
graphiti_core/embedder/openai.py CHANGED
@@ -15,8 +15,9 @@ limitations under the License.
  """

  from collections.abc import Iterable
+ from typing import Union

- from openai import AsyncOpenAI
+ from openai import AsyncAzureOpenAI, AsyncOpenAI
  from openai.types import EmbeddingModel

  from .client import EmbedderClient, EmbedderConfig
@@ -33,13 +34,23 @@ class OpenAIEmbedderConfig(EmbedderConfig):
  class OpenAIEmbedder(EmbedderClient):
      """
      OpenAI Embedder Client
+
+     This client supports both AsyncOpenAI and AsyncAzureOpenAI clients.
      """

-     def __init__(self, config: OpenAIEmbedderConfig | None = None):
+     def __init__(
+         self,
+         config: OpenAIEmbedderConfig | None = None,
+         client: Union[AsyncOpenAI, AsyncAzureOpenAI, None] = None,
+     ):
          if config is None:
              config = OpenAIEmbedderConfig()
          self.config = config
-         self.client = AsyncOpenAI(api_key=config.api_key, base_url=config.base_url)
+
+         if client is not None:
+             self.client = client
+         else:
+             self.client = AsyncOpenAI(api_key=config.api_key, base_url=config.base_url)

      async def create(
          self, input_data: str | list[str] | Iterable[int] | Iterable[Iterable[int]]
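
The embedder gains the same escape hatch. A minimal sketch of injecting a pre-built client (a plain `AsyncOpenAI` pointed at a placeholder endpoint here; the full Azure OpenAI wiring appears in the README section further down in this diff):

```python
from openai import AsyncOpenAI

from graphiti_core.embedder.openai import OpenAIEmbedder, OpenAIEmbedderConfig

# Any already-configured AsyncOpenAI or AsyncAzureOpenAI instance can be reused here.
custom_client = AsyncOpenAI(api_key="<your-api-key>", base_url="<your-openai-compatible-endpoint>")

embedder = OpenAIEmbedder(
    config=OpenAIEmbedderConfig(embedding_model="text-embedding-3-small"),
    client=custom_client,
)
```
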
graphiti_core/graphiti.py CHANGED
@@ -39,7 +39,6 @@ from graphiti_core.search.search_config_recipes import (
  from graphiti_core.search.search_filters import SearchFilters
  from graphiti_core.search.search_utils import (
      RELEVANT_SCHEMA_LIMIT,
-     get_communities_by_nodes,
      get_mentioned_nodes,
      get_relevant_edges,
      get_relevant_nodes,
@@ -708,7 +707,7 @@ class Graphiti:
              bfs_origin_node_uuids,
          )

-     async def get_episode_mentions(self, episode_uuids: list[str]) -> SearchResults:
+     async def get_nodes_and_edges_by_episode(self, episode_uuids: list[str]) -> SearchResults:
          episodes = await EpisodicNode.get_by_uuids(self.driver, episode_uuids)

          edges_list = await semaphore_gather(
@@ -719,9 +718,7 @@ class Graphiti:


          nodes = await get_mentioned_nodes(self.driver, episodes)
-         communities = await get_communities_by_nodes(self.driver, nodes)
-
-         return SearchResults(edges=edges, nodes=nodes, communities=communities)
+         return SearchResults(edges=edges, nodes=nodes, communities=[])

      async def add_triplet(self, source_node: EntityNode, edge: EntityEdge, target_node: EntityNode):
          if source_node.name_embedding is None:
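
Call sites of the old name need a one-line update. A sketch with an assumed `graphiti` instance and a placeholder episode UUID:

```python
# Formerly graphiti.get_episode_mentions(...); communities are no longer looked up.
results = await graphiti.get_nodes_and_edges_by_episode(episode_uuids=["<episode-uuid>"])
print(len(results.nodes), len(results.edges), results.communities)  # communities is now always []
```
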
graphiti_core/helpers.py CHANGED
@@ -28,7 +28,7 @@ load_dotenv()
  DEFAULT_DATABASE = os.getenv('DEFAULT_DATABASE', None)
  USE_PARALLEL_RUNTIME = bool(os.getenv('USE_PARALLEL_RUNTIME', False))
  SEMAPHORE_LIMIT = int(os.getenv('SEMAPHORE_LIMIT', 20))
- MAX_REFLEXION_ITERATIONS = 2
+ MAX_REFLEXION_ITERATIONS = int(os.getenv('MAX_REFLEXION_ITERATIONS', 2))
  DEFAULT_PAGE_LIMIT = 20

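
Since the constant is now read from the environment at module load, it can be tuned without touching code. A small sketch (the value 3 is an arbitrary example):

```python
import os

# Must be set before graphiti_core.helpers is first imported, since the module reads it at import time.
os.environ['MAX_REFLEXION_ITERATIONS'] = '3'

from graphiti_core import helpers

print(helpers.MAX_REFLEXION_ITERATIONS)  # -> 3
```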
 
graphiti_core/utils/maintenance/node_operations.py CHANGED
@@ -189,6 +189,9 @@ async def extract_nodes(
      new_nodes = []
      for name in extracted_node_names:
          entity_type = node_classifications.get(name)
+         if entity_types is not None and entity_type not in entity_types:
+             entity_type = None
+
          labels = (
              ['Entity']
              if entity_type is None or entity_type == 'None' or entity_type == 'null'
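
The new guard keeps only classifications that the caller actually registered; anything else falls back to a generic entity. A standalone illustration of that fallback with made-up data (no Graphiti APIs involved):

```python
from pydantic import BaseModel


class Person(BaseModel):
    occupation: str | None = None  # illustrative custom entity type


entity_types = {'Person': Person}
node_classifications = {'Kamala Harris': 'Person', 'California': 'Place'}

for name, entity_type in node_classifications.items():
    if entity_types is not None and entity_type not in entity_types:
        entity_type = None  # 'Place' was never registered, so it is dropped
    labels = ['Entity'] if entity_type is None else ['Entity', entity_type]
    print(name, labels)
# Kamala Harris ['Entity', 'Person']
# California ['Entity']
```
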
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: graphiti-core
- Version: 0.7.9
+ Version: 0.8.1
  Summary: A temporal graph building library
  License: Apache-2.0
  Author: Paul Paliychuk
@@ -38,12 +38,12 @@ Graphiti
  [![MyPy Check](https://github.com/getzep/Graphiti/actions/workflows/typecheck.yml/badge.svg)](https://github.com/getzep/Graphiti/actions/workflows/typecheck.yml)
  [![Open in GitHub Codespaces](https://github.com/codespaces/badge.svg)](https://codespaces.new/getzep/Graphiti)

- :star: *Help us reach more developers and grow the Graphiti community. Star this repo!*
+ :star: _Help us reach more developers and grow the Graphiti community. Star this repo!_
  <br />

  Graphiti builds dynamic, temporally aware Knowledge Graphs that represent complex, evolving relationships between
  entities over time. Graphiti ingests both unstructured and structured data, and the resulting graph may be queried using
- a fusion of time, full-text, semantic, and graph algorithm approaches.
+ a fusion of time, full-text, semantic, and graph algorithm approaches, effectively serving as a powerful memory layer for AI applications.

  <br />

@@ -62,11 +62,10 @@ while handling changing relationships and maintaining historical context.
  With Graphiti, you can build LLM applications such as:

  - Assistants that learn from user interactions, fusing personal knowledge with dynamic data from business systems like
-   CRMs and billing platforms.
- - Agents that autonomously execute complex tasks, reasoning with state changes from multiple dynamic sources.
+   CRMs and billing platforms through robust conversation history management.
+ - Agents that autonomously execute complex tasks, reasoning with state changes from multiple dynamic sources through persistent memory.

- Graphiti supports a wide range of applications in sales, customer service, health, finance, and more, enabling long-term
- recall and state-based reasoning for both assistants and agents.
+ Graphiti supports a wide range of applications in sales, customer service, health, finance, and more, enabling long-term recall and state-based reasoning for both assistants and agents.

  ## Graphiti and Zep Memory

@@ -77,7 +76,7 @@ the [State of the Art in Agent Memory](https://blog.getzep.com/state-of-the-art-

  Read our paper: [Zep: A Temporal Knowledge Graph Architecture for Agent Memory](https://arxiv.org/abs/2501.13956).

- We're excited to open-source Graphiti, believing its potential reaches far beyond memory applications.
+ We're excited to open-source Graphiti, believing its potential reaches far beyond AI memory applications.

  <p align="center">
  <a href="https://arxiv.org/abs/2501.13956"><img src="images/arxiv-screenshot.png" alt="Zep: A Temporal Knowledge Graph Architecture for Agent Memory" width="700px"></a>
@@ -85,7 +84,7 @@ We're excited to open-source Graphiti, believing its potential reaches far beyon

  ## Why Graphiti?

- We were intrigued by Microsofts GraphRAG, which expanded on RAG text chunking by using a graph to better model a
+ We were intrigued by Microsoft's GraphRAG, which expanded on RAG (Retrieval-Augmented Generation) text chunking by using a graph to better model a
  document corpus and making this representation available via semantic and graph search techniques. However, GraphRAG did
  not address our core problem: It's primarily designed for static documents and doesn't inherently handle temporal
  aspects of data.
@@ -94,13 +93,13 @@ Graphiti is designed from the ground up to handle constantly changing informatio
  scale:

  - **Temporal Awareness:** Tracks changes in facts and relationships over time, enabling point-in-time queries. Graph
-   edges include temporal metadata to record relationship lifecycles.
+   edges include temporal metadata to record relationship lifecycles, creating a comprehensive context window extension.
  - **Episodic Processing:** Ingests data as discrete episodes, maintaining data provenance and allowing incremental
-   entity and relationship extraction.
+   entity and relationship extraction, ideal for chat state management.
  - **Hybrid Search:** Combines semantic and BM25 full-text search, with the ability to rerank results by distance from a
-   central node e.g. Kendra”.
+   central node e.g. "Kendra".
  - **Scalable:** Designed for processing large datasets, with parallelization of LLM calls for bulk processing while
-   preserving the chronology of events.
+   preserving the chronology of events and enabling efficient knowledge retrieval.
  - **Supports Varied Sources:** Can ingest both unstructured text and structured JSON data.

  <p align="center">
@@ -112,7 +111,7 @@ scale:
  Requirements:

  - Python 3.10 or higher
- - Neo4j 5.26 or higher
+ - Neo4j 5.26 or higher (serves as the embeddings storage backend)
  - OpenAI API key (for LLM inference and embedding)

  Optional:
@@ -145,7 +144,7 @@ from graphiti_core import Graphiti
  from graphiti_core.nodes import EpisodeType
  from datetime import datetime, timezone

- # Initialize Graphiti
+ # Initialize Graphiti as Your Memory Layer
  graphiti = Graphiti("bolt://localhost:7687", "neo4j", "password")

  # Initialize the graph database with Graphiti's indices. This only needs to be done once.
@@ -166,7 +165,7 @@ for i, episode in enumerate(episodes):
          reference_time=datetime.now(timezone.utc)
      )

- # Search the graph
+ # Search the graph for semantic memory retrieval
  # Execute a hybrid search combining semantic similarity and BM25 retrieval
  # Results are combined and reranked using Reciprocal Rank Fusion
  results = await graphiti.search('Who was the California Attorney General?')
@@ -198,11 +197,12 @@ results = await graphiti.search('Who was the California Attorney General?')
  # Results are weighted by their proximity, with distant edges receiving lower scores.
  await graphiti.search('Who was the California Attorney General?', center_node_uuid)

- # Close the connection
+ # Close the connection when chat state management is complete
  graphiti.close()
  ```

  ## Graph Service
+
  The `server` directory contains an API service for interacting with the Graphiti API. It is built using FastAPI.

  Please see the [server README](./server/README.md) for more information.
@@ -218,6 +218,54 @@ to enable Neo4j's parallel runtime feature for several of our search queries.
  Note that this feature is not supported for Neo4j Community edition or for smaller AuraDB instances,
  as such this feature is off by default.

+ ## Using Graphiti with Azure OpenAI
+
+ Graphiti supports Azure OpenAI for both LLM inference and embeddings. To use Azure OpenAI, you'll need to configure both the LLM client and embedder with your Azure OpenAI credentials.
+
+ ```python
+ from openai import AsyncAzureOpenAI
+ from graphiti_core import Graphiti
+ from graphiti_core.llm_client import OpenAIClient
+ from graphiti_core.embedder.openai import OpenAIEmbedder, OpenAIEmbedderConfig
+ from graphiti_core.cross_encoder.openai_reranker_client import OpenAIRerankerClient
+
+ # Azure OpenAI configuration
+ api_key = "<your-api-key>"
+ api_version = "<your-api-version>"
+ azure_endpoint = "<your-azure-endpoint>"
+
+ # Create Azure OpenAI client for LLM
+ azure_openai_client = AsyncAzureOpenAI(
+     api_key=api_key,
+     api_version=api_version,
+     azure_endpoint=azure_endpoint
+ )
+
+ # Initialize Graphiti with Azure OpenAI clients
+ graphiti = Graphiti(
+     "bolt://localhost:7687",
+     "neo4j",
+     "password",
+     llm_client=OpenAIClient(
+         client=azure_openai_client
+     ),
+     embedder=OpenAIEmbedder(
+         config=OpenAIEmbedderConfig(
+             embedding_model="text-embedding-3-small"  # Use your Azure deployed embedding model name
+         ),
+         client=azure_openai_client
+     ),
+     # Optional: Configure the OpenAI cross encoder with Azure OpenAI
+     cross_encoder=OpenAIRerankerClient(
+         client=azure_openai_client
+     )
+ )
+
+ # Now you can use Graphiti with Azure OpenAI
+ ```
+
+ Make sure to replace the placeholder values with your actual Azure OpenAI credentials and specify the correct embedding model name that's deployed in your Azure OpenAI service.
+
  ## Documentation

  - [Guides and API documentation](https://help.getzep.com/graphiti).
@@ -228,9 +276,9 @@ as such this feature is off by default.

  Graphiti is under active development. We aim to maintain API stability while working on:

- - [ ] Supporting custom graph schemas:
-   - Allow developers to provide their own defined node and edge classes when ingesting episodes
-   - Enable more flexible knowledge representation tailored to specific use cases
+ - [x] Supporting custom graph schemas:
+   - Allow developers to provide their own defined node and edge classes when ingesting episodes
+   - Enable more flexible knowledge representation tailored to specific use cases
  - [x] Enhancing retrieval capabilities with more robust and configurable options
  - [ ] Expanding test coverage to ensure reliability and catch edge cases

@@ -1,16 +1,16 @@
  graphiti_core/__init__.py,sha256=e5SWFkRiaUwfprYIeIgVIh7JDedNiloZvd3roU-0aDY,55
- graphiti_core/cross_encoder/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ graphiti_core/cross_encoder/__init__.py,sha256=hry59vz21x-AtGZ0MJ7ugw0HTwJkXiddpp_Yqnwsen0,723
  graphiti_core/cross_encoder/bge_reranker_client.py,sha256=xgXZqB_qoaWQPjnmuf1ne38YPyhhvApySKcQDaHc9R4,1435
  graphiti_core/cross_encoder/client.py,sha256=KLsbfWKOEaAV3adFe3XZlAeb-gje9_sVKCVZTaJP3ac,1441
- graphiti_core/cross_encoder/openai_reranker_client.py,sha256=DECh36QdkM4mE5qgg158ss6G4si2DjgOTqrzGOz5C9s,4089
- graphiti_core/edges.py,sha256=A9tlOtSIVYy_OOKfOk5fAmZ13g8euuAe76im6nKJs0o,14766
+ graphiti_core/cross_encoder/openai_reranker_client.py,sha256=e-QCftckZ7GLQgJ-ijljw6vzVa6H417cZ3cISB70M5g,4377
+ graphiti_core/edges.py,sha256=WXJcJp4fhdlgL4sAsh3YBqkMRAvHKmmaC_7uCPLzePI,15680
  graphiti_core/embedder/__init__.py,sha256=eWd-0sPxflnYXLoWNT9sxwCIFun5JNO9Fk4E-ZXXf8Y,164
  graphiti_core/embedder/client.py,sha256=HKIlpPLnzFT81jurPkry6z8F8nxfZVfejdcfxHVUSFU,995
- graphiti_core/embedder/openai.py,sha256=FzEM9rtSDK1wTb4iYKjNjjdFf8BEBTDxG2vM_E-5W-8,1621
+ graphiti_core/embedder/openai.py,sha256=23BnPA10eiaa1HkxHKYSj75-0PymczPK2FNNIz8Txbc,1910
  graphiti_core/embedder/voyage.py,sha256=7kqrLG75J3Q6cdA2Nlx1JSYtpk2141ckdl3OtDDw0vU,1882
  graphiti_core/errors.py,sha256=ddHrHGQxhwkVAtSph4AV84UoOlgwZufMczXPwB7uqPo,1795
- graphiti_core/graphiti.py,sha256=BfsR_JF89_bX0D9PJ2Q2IHQrnph9hd4I7-ayGvvZxpU,29231
- graphiti_core/helpers.py,sha256=z7ApOgrm_J7hk5FN_XPAwkKyopEY943BgHjDJbSXr2s,2869
+ graphiti_core/graphiti.py,sha256=DP2hd1aXIYh-nXVfxugHvw7Tbax50w83IKMZf9Z5BeI,29128
+ graphiti_core/helpers.py,sha256=7BQzUBFmoBDA2OIDdFtoN4W-vXOhPRIsF0uDb7PsNi0,2913
  graphiti_core/llm_client/__init__.py,sha256=PA80TSMeX-sUXITXEAxMDEt3gtfZgcJrGJUcyds1mSo,207
  graphiti_core/llm_client/anthropic_client.py,sha256=RlD6e49XvMJsTKU0krpq46gPSFm6-hfLkkq4Sfx27BE,2574
  graphiti_core/llm_client/client.py,sha256=l07SpE_k18rAhK7QbPYTx3pOb2566kYauTlKIOV3rBg,5034
@@ -52,10 +52,10 @@ graphiti_core/utils/maintenance/__init__.py,sha256=TRY3wWWu5kn3Oahk_KKhltrWnh0NA
  graphiti_core/utils/maintenance/community_operations.py,sha256=gIw1M5HGgc2c3TXag5ygPPpAv5WsG-yoC8Lhmfr6FMs,10011
  graphiti_core/utils/maintenance/edge_operations.py,sha256=tNw56vN586JYZMgie6RLRTiHZ680-kWzDIxW8ucL6SU,12780
  graphiti_core/utils/maintenance/graph_data_operations.py,sha256=qds9ALk9PhpQs1CNZTZGpi70mqJ93Y2KhIh9X2r8MUI,6533
- graphiti_core/utils/maintenance/node_operations.py,sha256=UGCRjnqVNDjDxd7dIhwC16w0tcrthotlW0gDpLrVuLE,15118
+ graphiti_core/utils/maintenance/node_operations.py,sha256=H0DAL2Qau4weIcEIrVVSdntVQtN_y4Hzoik6GldP1XA,15223
  graphiti_core/utils/maintenance/temporal_operations.py,sha256=RdNtubCyYhOVrvcOIq2WppHls1Q-BEjtsN8r38l-Rtc,3691
  graphiti_core/utils/maintenance/utils.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- graphiti_core-0.7.9.dist-info/LICENSE,sha256=KCUwCyDXuVEgmDWkozHyniRyWjnWUWjkuDHfU6o3JlA,11325
- graphiti_core-0.7.9.dist-info/METADATA,sha256=OIaGI38j09UhXg1_XLJhIuVvQpjhn0zAkvbd26LfUPQ,10541
- graphiti_core-0.7.9.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
- graphiti_core-0.7.9.dist-info/RECORD,,
+ graphiti_core-0.8.1.dist-info/LICENSE,sha256=KCUwCyDXuVEgmDWkozHyniRyWjnWUWjkuDHfU6o3JlA,11325
+ graphiti_core-0.8.1.dist-info/METADATA,sha256=uc7lXSL18BV_rym3hPlfLvA9l27Mrnx206rWvr9bT00,12583
+ graphiti_core-0.8.1.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
+ graphiti_core-0.8.1.dist-info/RECORD,,