graphiti-core 0.2.1__py3-none-any.whl → 0.2.3__py3-none-any.whl

This diff shows the changes between these publicly released package versions as they appear in their public registry, and is provided for informational purposes only.

Potentially problematic release.

@@ -24,6 +24,10 @@ from graphiti_core.edges import EntityEdge, EpisodicEdge
 from graphiti_core.llm_client import LLMClient
 from graphiti_core.nodes import EntityNode, EpisodicNode
 from graphiti_core.prompts import prompt_library
+from graphiti_core.utils.maintenance.temporal_operations import (
+    extract_edge_dates,
+    get_edge_contradictions,
+)
 
 logger = logging.getLogger(__name__)
 
@@ -33,15 +37,15 @@ def build_episodic_edges(
     episode: EpisodicNode,
     created_at: datetime,
 ) -> List[EpisodicEdge]:
-    edges: List[EpisodicEdge] = []
-
-    for node in entity_nodes:
-        edge = EpisodicEdge(
+    edges: List[EpisodicEdge] = [
+        EpisodicEdge(
             source_node_uuid=episode.uuid,
             target_node_uuid=node.uuid,
             created_at=created_at,
+            group_id=episode.group_id,
         )
-        edges.append(edge)
+        for node in entity_nodes
+    ]
 
     return edges
 
@@ -51,6 +55,7 @@ async def extract_edges(
     episode: EpisodicNode,
     nodes: list[EntityNode],
     previous_episodes: list[EpisodicNode],
+    group_id: str | None,
 ) -> list[EntityEdge]:
     start = time()
 
@@ -84,6 +89,7 @@ async def extract_edges(
             source_node_uuid=edge_data['source_node_uuid'],
             target_node_uuid=edge_data['target_node_uuid'],
             name=edge_data['relation_type'],
+            group_id=group_id,
             fact=edge_data['fact'],
             episodes=[episode.uuid],
             created_at=datetime.now(),
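
The hunks above thread a new group_id through ingestion: episodic edges, extracted entity edges, and (further down in this diff) entity nodes and episodes all inherit the episode's group_id. A minimal stand-in sketch of that propagation pattern follows; the Node and Edge dataclasses here are illustrative stand-ins, not the graphiti_core models.

```python
# Illustrative stand-in for the pattern above; Node/Edge are NOT graphiti_core models.
from dataclasses import dataclass
from datetime import datetime


@dataclass
class Node:
    uuid: str
    group_id: str | None = None


@dataclass
class Edge:
    source_node_uuid: str
    target_node_uuid: str
    created_at: datetime
    group_id: str | None = None


def link_episode(episode: Node, entity_nodes: list[Node], created_at: datetime) -> list[Edge]:
    # Every edge inherits the episode's group_id, so a group's subgraph stays self-contained.
    return [
        Edge(
            source_node_uuid=episode.uuid,
            target_node_uuid=node.uuid,
            created_at=created_at,
            group_id=episode.group_id,
        )
        for node in entity_nodes
    ]


edges = link_episode(Node('ep-1', 'tenant-42'), [Node('n-1'), Node('n-2')], datetime.now())
assert all(e.group_id == 'tenant-42' for e in edges)
```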
@@ -149,28 +155,110 @@ async def dedupe_extracted_edges(
 async def resolve_extracted_edges(
     llm_client: LLMClient,
     extracted_edges: list[EntityEdge],
+    related_edges_lists: list[list[EntityEdge]],
     existing_edges_lists: list[list[EntityEdge]],
-) -> list[EntityEdge]:
-    resolved_edges: list[EntityEdge] = list(
+    current_episode: EpisodicNode,
+    previous_episodes: list[EpisodicNode],
+) -> tuple[list[EntityEdge], list[EntityEdge]]:
+    # resolve edges with related edges in the graph, extract temporal information, and find invalidation candidates
+    results: list[tuple[EntityEdge, list[EntityEdge]]] = list(
         await asyncio.gather(
             *[
-                resolve_extracted_edge(llm_client, extracted_edge, existing_edges)
-                for extracted_edge, existing_edges in zip(extracted_edges, existing_edges_lists)
+                resolve_extracted_edge(
+                    llm_client,
+                    extracted_edge,
+                    related_edges,
+                    existing_edges,
+                    current_episode,
+                    previous_episodes,
+                )
+                for extracted_edge, related_edges, existing_edges in zip(
+                    extracted_edges, related_edges_lists, existing_edges_lists
+                )
             ]
         )
     )
 
-    return resolved_edges
+    resolved_edges: list[EntityEdge] = []
+    invalidated_edges: list[EntityEdge] = []
+    for result in results:
+        resolved_edge = result[0]
+        invalidated_edge_chunk = result[1]
+
+        resolved_edges.append(resolved_edge)
+        invalidated_edges.extend(invalidated_edge_chunk)
+
+    return resolved_edges, invalidated_edges
 
 
 async def resolve_extracted_edge(
-    llm_client: LLMClient, extracted_edge: EntityEdge, existing_edges: list[EntityEdge]
+    llm_client: LLMClient,
+    extracted_edge: EntityEdge,
+    related_edges: list[EntityEdge],
+    existing_edges: list[EntityEdge],
+    current_episode: EpisodicNode,
+    previous_episodes: list[EpisodicNode],
+) -> tuple[EntityEdge, list[EntityEdge]]:
+    resolved_edge, (valid_at, invalid_at), invalidation_candidates = await asyncio.gather(
+        dedupe_extracted_edge(llm_client, extracted_edge, related_edges),
+        extract_edge_dates(llm_client, extracted_edge, current_episode, previous_episodes),
+        get_edge_contradictions(llm_client, extracted_edge, existing_edges),
+    )
+
+    now = datetime.now()
+
+    resolved_edge.valid_at = valid_at if valid_at is not None else resolved_edge.valid_at
+    resolved_edge.invalid_at = invalid_at if invalid_at is not None else resolved_edge.invalid_at
+    if invalid_at is not None and resolved_edge.expired_at is None:
+        resolved_edge.expired_at = now
+
+    # Determine if the new_edge needs to be expired
+    if resolved_edge.expired_at is None:
+        invalidation_candidates.sort(key=lambda c: (c.valid_at is None, c.valid_at))
+        for candidate in invalidation_candidates:
+            if (
+                candidate.valid_at is not None and resolved_edge.valid_at is not None
+            ) and candidate.valid_at > resolved_edge.valid_at:
+                # Expire new edge since we have information about more recent events
+                resolved_edge.invalid_at = candidate.valid_at
+                resolved_edge.expired_at = now
+                break
+
+    # Determine which contradictory edges need to be expired
+    invalidated_edges: list[EntityEdge] = []
+    for edge in invalidation_candidates:
+        # (Edge invalid before new edge becomes valid) or (new edge invalid before edge becomes valid)
+        if (
+            edge.invalid_at is not None
+            and resolved_edge.valid_at is not None
+            and edge.invalid_at < resolved_edge.valid_at
+        ) or (
+            edge.valid_at is not None
+            and resolved_edge.invalid_at is not None
+            and resolved_edge.invalid_at < edge.valid_at
+        ):
+            continue
+        # New edge invalidates edge
+        elif (
+            edge.valid_at is not None
+            and resolved_edge.valid_at is not None
+            and edge.valid_at < resolved_edge.valid_at
+        ):
+            edge.invalid_at = resolved_edge.valid_at
+            edge.expired_at = edge.expired_at if edge.expired_at is not None else now
+            invalidated_edges.append(edge)
+
+    return resolved_edge, invalidated_edges
+
+
+async def dedupe_extracted_edge(
+    llm_client: LLMClient, extracted_edge: EntityEdge, related_edges: list[EntityEdge]
 ) -> EntityEdge:
     start = time()
 
     # Prepare context for LLM
-    existing_edges_context = [
-        {'uuid': edge.uuid, 'name': edge.name, 'fact': edge.fact} for edge in existing_edges
+    related_edges_context = [
+        {'uuid': edge.uuid, 'name': edge.name, 'fact': edge.fact} for edge in related_edges
     ]
 
     extracted_edge_context = {
@@ -180,7 +268,7 @@ async def resolve_extracted_edge(
     }
 
     context = {
-        'existing_edges': existing_edges_context,
+        'related_edges': related_edges_context,
         'extracted_edges': extracted_edge_context,
     }
 
@@ -191,14 +279,14 @@ async def resolve_extracted_edge(
 
     edge = extracted_edge
     if is_duplicate:
-        for existing_edge in existing_edges:
+        for existing_edge in related_edges:
             if existing_edge.uuid != uuid:
                 continue
             edge = existing_edge
 
     end = time()
     logger.info(
-        f'Resolved node: {extracted_edge.name} is {edge.name}, in {(end - start) * 1000} ms'
+        f'Resolved Edge: {extracted_edge.name} is {edge.name}, in {(end - start) * 1000} ms'
     )
 
     return edge
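
The reworked resolve_extracted_edge above runs edge deduplication, date extraction, and contradiction detection concurrently, then applies a purely temporal rule to decide which contradicted facts to expire. The following self-contained sketch re-expresses that rule with a plain dataclass standing in for EntityEdge so it can be run in isolation; it mirrors the comparison logic shown in the diff and is not part of the package.

```python
# Standalone sketch of the invalidation rule above; FactEdge is a stand-in for EntityEdge.
from dataclasses import dataclass
from datetime import datetime
from typing import Optional


@dataclass
class FactEdge:
    fact: str
    valid_at: Optional[datetime] = None
    invalid_at: Optional[datetime] = None
    expired_at: Optional[datetime] = None


def invalidate_contradictions(new_edge: FactEdge, candidates: list[FactEdge]) -> list[FactEdge]:
    now = datetime.now()
    invalidated: list[FactEdge] = []
    for edge in candidates:
        # Skip candidates whose validity interval does not overlap the new edge's interval.
        if (
            edge.invalid_at is not None
            and new_edge.valid_at is not None
            and edge.invalid_at < new_edge.valid_at
        ) or (
            edge.valid_at is not None
            and new_edge.invalid_at is not None
            and new_edge.invalid_at < edge.valid_at
        ):
            continue
        # An older, still-open fact is closed off by the newer one.
        if (
            edge.valid_at is not None
            and new_edge.valid_at is not None
            and edge.valid_at < new_edge.valid_at
        ):
            edge.invalid_at = new_edge.valid_at
            edge.expired_at = edge.expired_at or now
            invalidated.append(edge)
    return invalidated


old = FactEdge('Alice is an engineer', valid_at=datetime(2023, 1, 1))
new = FactEdge('Alice is the CTO', valid_at=datetime(2024, 6, 1))
print(invalidate_contradictions(new, [old]))  # the old fact gets invalid_at = 2024-06-01
```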
@@ -34,6 +34,10 @@ async def build_indices_and_constraints(driver: AsyncDriver):
         'CREATE INDEX episode_uuid IF NOT EXISTS FOR (n:Episodic) ON (n.uuid)',
         'CREATE INDEX relation_uuid IF NOT EXISTS FOR ()-[e:RELATES_TO]-() ON (e.uuid)',
         'CREATE INDEX mention_uuid IF NOT EXISTS FOR ()-[e:MENTIONS]-() ON (e.uuid)',
+        'CREATE INDEX entity_group_id IF NOT EXISTS FOR (n:Entity) ON (n.group_id)',
+        'CREATE INDEX episode_group_id IF NOT EXISTS FOR (n:Episodic) ON (n.group_id)',
+        'CREATE INDEX relation_group_id IF NOT EXISTS FOR ()-[e:RELATES_TO]-() ON (e.group_id)',
+        'CREATE INDEX mention_group_id IF NOT EXISTS FOR ()-[e:MENTIONS]-() ON (e.group_id)',
         'CREATE INDEX name_entity_index IF NOT EXISTS FOR (n:Entity) ON (n.name)',
         'CREATE INDEX created_at_entity_index IF NOT EXISTS FOR (n:Entity) ON (n.created_at)',
         'CREATE INDEX created_at_episodic_index IF NOT EXISTS FOR (n:Episodic) ON (n.created_at)',
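
The four new indexes back the group_id filters added throughout this release. The snippet below only illustrates the kind of group-scoped lookup they serve; the connection URI and credentials are placeholders, and the query is not one the package itself issues.

```python
# Illustration only: a group-scoped lookup that the new group_id indexes can serve.
from neo4j import AsyncGraphDatabase


async def count_entities_in_group(group_id: str) -> int:
    # Placeholder connection details.
    driver = AsyncGraphDatabase.driver('bolt://localhost:7687', auth=('neo4j', 'password'))
    try:
        records, _, _ = await driver.execute_query(
            'MATCH (n:Entity) WHERE n.group_id = $group_id RETURN count(n) AS c',
            group_id=group_id,
        )
        return records[0]['c']
    finally:
        await driver.close()


# Run with: asyncio.run(count_entities_in_group('tenant-42'))
```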
@@ -86,6 +90,7 @@ async def retrieve_episodes(
     driver: AsyncDriver,
     reference_time: datetime,
     last_n: int = EPISODE_WINDOW_LEN,
+    group_ids: list[str | None] | None = None,
 ) -> list[EpisodicNode]:
     """
     Retrieve the last n episodic nodes from the graph.
@@ -96,25 +101,28 @@ async def retrieve_episodes(
             less than or equal to this reference_time will be retrieved. This allows for
             querying the graph's state at a specific point in time.
         last_n (int, optional): The number of most recent episodes to retrieve, relative to the reference_time.
+        group_ids (list[str], optional): The list of group ids to return data from.
 
     Returns:
         list[EpisodicNode]: A list of EpisodicNode objects representing the retrieved episodes.
     """
     result = await driver.execute_query(
         """
-        MATCH (e:Episodic) WHERE e.valid_at <= $reference_time
-        RETURN e.content as content,
-            e.created_at as created_at,
-            e.valid_at as valid_at,
-            e.uuid as uuid,
-            e.name as name,
-            e.source_description as source_description,
-            e.source as source
+        MATCH (e:Episodic) WHERE e.valid_at <= $reference_time AND e.group_id in $group_ids
+        RETURN e.content AS content,
+            e.created_at AS created_at,
+            e.valid_at AS valid_at,
+            e.uuid AS uuid,
+            e.group_id AS group_id,
+            e.name AS name,
+            e.source_description AS source_description,
+            e.source AS source
         ORDER BY e.created_at DESC
         LIMIT $num_episodes
         """,
         reference_time=reference_time,
         num_episodes=last_n,
+        group_ids=group_ids,
     )
     episodes = [
         EpisodicNode(
@@ -124,6 +132,7 @@ async def retrieve_episodes(
             ),
             valid_at=(record['valid_at'].to_native()),
             uuid=record['uuid'],
+            group_id=record['group_id'],
             source=EpisodeType.from_str(record['source']),
             name=record['name'],
             source_description=record['source_description'],
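
With the new group_ids parameter, episode retrieval can be scoped to one or more groups. A usage sketch follows, assuming retrieve_episodes lives in graphiti_core.utils.maintenance.graph_data_operations (the module this hunk appears to modify) and using placeholder connection details.

```python
# Usage sketch under the assumptions stated above; connection details are placeholders.
from datetime import datetime, timezone

from neo4j import AsyncGraphDatabase

from graphiti_core.utils.maintenance.graph_data_operations import retrieve_episodes


async def latest_episodes_for_group(group_id: str):
    driver = AsyncGraphDatabase.driver('bolt://localhost:7687', auth=('neo4j', 'password'))
    try:
        # Only episodes whose group_id is in the supplied list are returned.
        return await retrieve_episodes(
            driver,
            reference_time=datetime.now(timezone.utc),
            last_n=5,
            group_ids=[group_id],
        )
    finally:
        await driver.close()
```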
@@ -85,6 +85,7 @@ async def extract_nodes(
     for node_data in extracted_node_data:
         new_node = EntityNode(
             name=node_data['name'],
+            group_id=episode.group_id,
             labels=node_data['labels'],
             summary=node_data['summary'],
             created_at=datetime.now(),
@@ -16,6 +16,7 @@ limitations under the License.
 
 import logging
 from datetime import datetime
+from time import time
 from typing import List
 
 from graphiti_core.edges import EntityEdge
@@ -181,3 +182,36 @@ async def extract_edge_dates(
     logger.info(f'Edge date extraction explanation: {explanation}')
 
     return valid_at_datetime, invalid_at_datetime
+
+
+async def get_edge_contradictions(
+    llm_client: LLMClient, new_edge: EntityEdge, existing_edges: list[EntityEdge]
+) -> list[EntityEdge]:
+    start = time()
+    existing_edge_map = {edge.uuid: edge for edge in existing_edges}
+
+    new_edge_context = {'uuid': new_edge.uuid, 'name': new_edge.name, 'fact': new_edge.fact}
+    existing_edge_context = [
+        {'uuid': existing_edge.uuid, 'name': existing_edge.name, 'fact': existing_edge.fact}
+        for existing_edge in existing_edges
+    ]
+
+    context = {'new_edge': new_edge_context, 'existing_edges': existing_edge_context}
+
+    llm_response = await llm_client.generate_response(prompt_library.invalidate_edges.v2(context))
+
+    contradicted_edge_data = llm_response.get('invalidated_edges', [])
+
+    contradicted_edges: list[EntityEdge] = []
+    for edge_data in contradicted_edge_data:
+        if edge_data['uuid'] in existing_edge_map:
+            contradicted_edge = existing_edge_map[edge_data['uuid']]
+            contradicted_edge.fact = edge_data['fact']
+            contradicted_edges.append(contradicted_edge)
+
+    end = time()
+    logger.info(
+        f'Found invalidated edge candidates from {new_edge.fact}, in {(end - start) * 1000} ms'
+    )
+
+    return contradicted_edges
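
get_edge_contradictions asks the LLM, via the invalidate_edges v2 prompt, which stored facts a freshly extracted edge contradicts, and returns those edges so the caller can expire them. The wiring sketch below assumes that OpenAIClient is exported from graphiti_core.llm_client and reads OPENAI_API_KEY from the environment, and that new_edge and existing_edges are EntityEdge objects produced by extraction and graph search.

```python
# Wiring sketch under the assumptions stated above.
from graphiti_core.edges import EntityEdge
from graphiti_core.llm_client import OpenAIClient
from graphiti_core.utils.maintenance.temporal_operations import get_edge_contradictions


async def find_contradictions(
    new_edge: EntityEdge, existing_edges: list[EntityEdge]
) -> list[EntityEdge]:
    llm_client = OpenAIClient()  # assumed to pick up OPENAI_API_KEY from the environment
    contradicted = await get_edge_contradictions(llm_client, new_edge, existing_edges)
    for edge in contradicted:
        print(f'{new_edge.fact!r} contradicts {edge.fact!r}')
    return contradicted
```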
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: graphiti-core
-Version: 0.2.1
+Version: 0.2.3
 Summary: A temporal graph building library
 License: Apache-2.0
 Author: Paul Paliychuk
@@ -12,11 +12,10 @@ Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Requires-Dist: diskcache (>=5.6.3,<6.0.0)
-Requires-Dist: fastapi (>=0.112.0,<0.113.0)
 Requires-Dist: neo4j (>=5.23.0,<6.0.0)
+Requires-Dist: numpy (>=2.1.1,<3.0.0)
 Requires-Dist: openai (>=1.38.0,<2.0.0)
 Requires-Dist: pydantic (>=2.8.2,<3.0.0)
-Requires-Dist: sentence-transformers (>=3.0.1,<4.0.0)
 Requires-Dist: tenacity (<9.0.0)
 Description-Content-Type: text/markdown
 
@@ -173,7 +172,7 @@ graphiti.close()
 
 ## Documentation
 
-- [Guides and API documentation](https://help.getzep.com/Graphiti/Graphiti).
+- [Guides and API documentation](https://help.getzep.com/graphiti).
 - [Quick Start](https://help.getzep.com/graphiti/graphiti/quick-start)
 - [Building an agent with LangChain's LangGraph and Graphiti](https://help.getzep.com/graphiti/graphiti/lang-graph-agent)
 
@@ -1,6 +1,6 @@
 graphiti_core/__init__.py,sha256=e5SWFkRiaUwfprYIeIgVIh7JDedNiloZvd3roU-0aDY,55
-graphiti_core/edges.py,sha256=Sxsqw7WZAC6YJKftMaF9t69o7HV_GM6m6ULjtLhZg0M,7484
-graphiti_core/graphiti.py,sha256=2skhhx7pgDotUXaXadJ-YGzaZWYmOoSpjOH10PwyBj4,23374
+graphiti_core/edges.py,sha256=oy_tK9YWE7_g4aQMGutymVdreiC-SsWP6ZtayEYGCFQ,7700
+graphiti_core/graphiti.py,sha256=tUEtyBb8hQXTn_eMmVSsFVBV7AKWE22SPQihCMZtcZU,23647
 graphiti_core/helpers.py,sha256=EAeC3RrcecjiTGN2vxergN5RHTy2_jhFXA5PQVT3toU,200
 graphiti_core/llm_client/__init__.py,sha256=f4OSk82jJ70wZ2HOuQu6-RQWkkf7HIB0FCT6xOuxZkQ,154
 graphiti_core/llm_client/anthropic_client.py,sha256=C8lOLm7in_eNfOP7s8gjMM0Y99-TzKWlGaPuVGceX68,2180
@@ -9,29 +9,28 @@ graphiti_core/llm_client/config.py,sha256=d1oZ9tt7QBQlbph7v-0HjItb6otK9_-IwF8kkR
 graphiti_core/llm_client/groq_client.py,sha256=qscr5-190wBTUCBL31EAjQTLytK9AF75-y9GsVRvGJU,2206
 graphiti_core/llm_client/openai_client.py,sha256=Bkrp_mKzAxK6kgPzv1UtVUgr1ZvvJhE2H39hgAwWrsI,2211
 graphiti_core/llm_client/utils.py,sha256=H8-Kwa5SyvIYDNIas8O4bHJ6jsOL49li44VoDEMyauY,555
-graphiti_core/nodes.py,sha256=lUSGkWs7EN88qQ1kwwun-t1SWNmTL4z8fOg1dOCqwl0,7879
+graphiti_core/nodes.py,sha256=RZnIKyu9ZzWVlbodae3Rkzlg00fQIqp5o3iGB4Ffm-M,8140
 graphiti_core/prompts/__init__.py,sha256=EA-x9xUki9l8wnu2l8ek_oNf75-do5tq5hVq7Zbv8Kw,101
-graphiti_core/prompts/dedupe_edges.py,sha256=FuZQVZlXTYjskaRUYblToLw4cFjyDp4ECrSf-Y8Z4sU,6530
+graphiti_core/prompts/dedupe_edges.py,sha256=DUNHdIudj50FAjkla4nc68tSFSD2yjmYHBw-Bb7ph20,6529
 graphiti_core/prompts/dedupe_nodes.py,sha256=BZ9S-PB9SSGjc5Oo8ivdgA6rZx3OGOFhKtwrBlQ0bm0,7269
 graphiti_core/prompts/extract_edge_dates.py,sha256=G-Gnsyt8pYx9lFJEwlIsTdADF3ESDe26WSsrAGmvlYk,3086
 graphiti_core/prompts/extract_edges.py,sha256=AQ8xYbAv_RKXAT6WMwXs1_GvUdLtM_lhLNbt3SkOAmk,5348
 graphiti_core/prompts/extract_nodes.py,sha256=isYly1Yq9tpD-Dlj2JNvKMdsJUqjWMSO16ZFinFxWic,5304
-graphiti_core/prompts/invalidate_edges.py,sha256=-BJ5j73fDAhRJa1abs35rKYyo-_OSZYTlQNphfo5Kuk,2993
+graphiti_core/prompts/invalidate_edges.py,sha256=8SHt3iPTdmqk8A52LxgdMtI39w4USKqVDMOS2i6lRQ4,4342
 graphiti_core/prompts/lib.py,sha256=RR8f8DQfioUK5bJonMzn02pKLxJlaENv1VocpvRJ488,3532
 graphiti_core/prompts/models.py,sha256=cvx_Bv5RMFUD_5IUawYrbpOKLPHogai7_bm7YXrSz84,867
 graphiti_core/search/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-graphiti_core/search/search.py,sha256=IUqAdWub2mg-j9Mz-NacJVLequsG5fxqx2SySKnQtXA,4438
-graphiti_core/search/search_utils.py,sha256=0tC_TEPCxB6w8uCUXwpqC27nDog-9bjBNrtwXlyFjIk,21138
+graphiti_core/search/search.py,sha256=cr1-syRlRdijnLtbuQYWy_2G1CtAeIaz6BQ2kl_6FrY,4535
+graphiti_core/search/search_utils.py,sha256=YeJ-M67HXPQySruwZmau3jvilFlcwf8OwfuflnSdf1Q,19355
 graphiti_core/utils/__init__.py,sha256=cJAcMnBZdHBQmWrZdU1PQ1YmaL75bhVUkyVpIPuOyns,260
-graphiti_core/utils/bulk_utils.py,sha256=xwKgHDNiGDt3-jG_YfN4vrHfG-SUxfuBnsFnBANal98,11683
+graphiti_core/utils/bulk_utils.py,sha256=JtoYTZPCigPa3n2E43Oe7QhFZRTA_QKNGy1jVgklHag,12614
 graphiti_core/utils/maintenance/__init__.py,sha256=4b9sfxqyFZMLwxxS2lnQ6_wBr3xrJRIqfAWOidK8EK0,388
-graphiti_core/utils/maintenance/edge_operations.py,sha256=Z9t9Rwnpjcc2obcOG6kAxeqrfQCNak54hT8do2RrITs,7201
-graphiti_core/utils/maintenance/graph_data_operations.py,sha256=ggzCWezFyLC29VZBiYHvanOpSRLaPtcmbgHgcl-qHy8,5321
-graphiti_core/utils/maintenance/node_operations.py,sha256=1Iswwoqy7HDH_CQACQUq3oQKrX7cNZb1kdkSQOawj84,7956
-graphiti_core/utils/maintenance/temporal_operations.py,sha256=XIo3xSYQ_4LFiRDBJ-V03l2-rYeIcq6Id0O1o53yWWY,6865
+graphiti_core/utils/maintenance/edge_operations.py,sha256=Xq60YlOGQKzD5qN6eahUMOiLQJiBaDNOeIiGkS8EdB0,10855
+graphiti_core/utils/maintenance/graph_data_operations.py,sha256=-A4fPYtXIjoBBX6IDPoaU9pDcSjZGeRbRPj23W1C-l4,5951
+graphiti_core/utils/maintenance/node_operations.py,sha256=ecBOp_reQynENFN0M69IzRPgEuBYOuPpDBwFZq5e-I4,7995
+graphiti_core/utils/maintenance/temporal_operations.py,sha256=BzfGDm96w4HcUEsaWTHUBt5S8dNmDQL1eX6AuBL-XFM,8135
 graphiti_core/utils/maintenance/utils.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-graphiti_core/utils/utils.py,sha256=LguHvEDi9JruXKWXXHaz2f4vpezdfgY-rpxjPq0dao8,1959
-graphiti_core-0.2.1.dist-info/LICENSE,sha256=KCUwCyDXuVEgmDWkozHyniRyWjnWUWjkuDHfU6o3JlA,11325
-graphiti_core-0.2.1.dist-info/METADATA,sha256=udtA4SQ8-GKtxObQB6dBBrR0t102mla5Dqv3MMUEO5Y,9193
-graphiti_core-0.2.1.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
-graphiti_core-0.2.1.dist-info/RECORD,,
+graphiti_core-0.2.3.dist-info/LICENSE,sha256=KCUwCyDXuVEgmDWkozHyniRyWjnWUWjkuDHfU6o3JlA,11325
+graphiti_core-0.2.3.dist-info/METADATA,sha256=o81BUoLGtzm0AhnO9MBW8yeG1_UPFN6m-0PmnFOJKis,9124
+graphiti_core-0.2.3.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
+graphiti_core-0.2.3.dist-info/RECORD,,
@@ -1,60 +0,0 @@
-"""
-Copyright 2024, Zep Software, Inc.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-
-import logging
-from collections import defaultdict
-
-from graphiti_core.edges import EntityEdge, EpisodicEdge
-from graphiti_core.nodes import EntityNode, EpisodicNode
-
-logger = logging.getLogger(__name__)
-
-
-def build_episodic_edges(
-    entity_nodes: list[EntityNode], episode: EpisodicNode
-) -> list[EpisodicEdge]:
-    edges: list[EpisodicEdge] = []
-
-    for node in entity_nodes:
-        edges.append(
-            EpisodicEdge(
-                source_node_uuid=episode.uuid,
-                target_node_uuid=node.uuid,
-                created_at=episode.created_at,
-            )
-        )
-
-    return edges
-
-
-def chunk_edges_by_nodes(edges: list[EntityEdge]) -> list[list[EntityEdge]]:
-    # We only want to dedupe edges that are between the same pair of nodes
-    # We build a map of the edges based on their source and target nodes.
-    edge_chunk_map: dict[str, list[EntityEdge]] = defaultdict(list)
-    for edge in edges:
-        # We drop loop edges
-        if edge.source_node_uuid == edge.target_node_uuid:
-            continue
-
-        # Keep the order of the two nodes consistent, we want to be direction agnostic during edge resolution
-        pointers = [edge.source_node_uuid, edge.target_node_uuid]
-        pointers.sort()
-
-        edge_chunk_map[pointers[0] + pointers[1]].append(edge)
-
-    edge_chunks = [chunk for chunk in edge_chunk_map.values()]
-
-    return edge_chunks