graphiti-core 0.12.0rc3__py3-none-any.whl → 0.12.0rc5__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as published to a supported registry. It is provided for informational purposes only.

Potentially problematic release.


This version of graphiti-core might be problematic. Click here for more details.

@@ -137,8 +137,12 @@ def nodes(context: dict[str, Any]) -> list[Message]:
137
137
  <ENTITIES>
138
138
  {json.dumps(context['extracted_nodes'], indent=2)}
139
139
  </ENTITIES>
140
+
141
+ <EXISTING ENTITIES>
142
+ {json.dumps(context['existing_nodes'], indent=2)}
143
+ </EXISTING ENTITIES>
140
144
 
141
- For each of the above ENTITIES, determine if the entity is a duplicate of any of its duplication candidates.
145
+ For each of the above ENTITIES, determine if the entity is a duplicate of any of the EXISTING ENTITIES.
142
146
 
143
147
  Entities should only be considered duplicates if they refer to the *same real-world object or concept*.
144
148
 
@@ -152,9 +156,9 @@ def nodes(context: dict[str, Any]) -> list[Message]:
152
156
  For each entity, return the id of the entity as id, the name of the entity as name, and the duplicate_idx
153
157
  as an integer.
154
158
 
155
- - If an entity is a duplicate of one of its duplication_candidates, return the idx of the candidate it is a
159
+ - If an entity is a duplicate of one of the EXISTING ENTITIES, return the idx of the candidate it is a
156
160
  duplicate of.
157
- - If an entity is not a duplicate of one of its duplication candidates, return the -1 as the duplication_idx
161
+ - If an entity is not a duplicate of one of the EXISTING ENTITIES, return the -1 as the duplication_idx
158
162
  """,
159
163
  ),
160
164
  ]
@@ -24,8 +24,8 @@ from .models import Message, PromptFunction, PromptVersion
24
24
 
25
25
  class Edge(BaseModel):
26
26
  relation_type: str = Field(..., description='FACT_PREDICATE_IN_SCREAMING_SNAKE_CASE')
27
- source_entity_name: str = Field(..., description='The name of the source entity of the fact.')
28
- target_entity_name: str = Field(..., description='The name of the target entity of the fact.')
27
+ source_entity_id: int = Field(..., description='The id of the source entity of the fact.')
28
+ target_entity_id: int = Field(..., description='The id of the target entity of the fact.')
29
29
  fact: str = Field(..., description='')
30
30
  valid_at: str | None = Field(
31
31
  None,
@@ -77,7 +77,7 @@ def edge(context: dict[str, Any]) -> list[Message]:
77
77
  </CURRENT_MESSAGE>
78
78
 
79
79
  <ENTITIES>
80
- {context['nodes']} # Each has: id, label (e.g., Person, Org), name, aliases
80
+ {context['nodes']}
81
81
  </ENTITIES>
82
82
 
83
83
  <REFERENCE_TIME>
@@ -94,8 +94,9 @@ Only extract facts that:
94
94
  - involve two DISTINCT ENTITIES from the ENTITIES list,
95
95
  - are clearly stated or unambiguously implied in the CURRENT MESSAGE,
96
96
  and can be represented as edges in a knowledge graph.
97
- - The FACT TYPES provide a list of the most important types of facts, make sure to extract any facts that
98
- could be classified into one of the provided fact types
97
+ - The FACT TYPES provide a list of the most important types of facts, make sure to extract facts of these types
98
+ - The FACT TYPES are not an exhaustive list, extract all facts from the message even if they do not fit into one
99
+ of the FACT TYPES
99
100
 
100
101
  You may use information from the PREVIOUS MESSAGES only to disambiguate references or support continuity.
101
102
 
@@ -95,12 +95,12 @@ def edge_search_filter_query_constructor(
95
95
  and_filter_query = ''
96
96
  for j, and_filter in enumerate(and_filters):
97
97
  and_filter_query += and_filter
98
- if j != len(and_filter_query) - 1:
98
+ if j != len(and_filters) - 1:
99
99
  and_filter_query += ' AND '
100
100
 
101
101
  valid_at_filter += and_filter_query
102
102
 
103
- if i == len(or_list) - 1:
103
+ if i == len(filters.valid_at) - 1:
104
104
  valid_at_filter += ')'
105
105
  else:
106
106
  valid_at_filter += ' OR '
@@ -120,12 +120,12 @@ def edge_search_filter_query_constructor(
120
120
  and_filter_query = ''
121
121
  for j, and_filter in enumerate(and_filters):
122
122
  and_filter_query += and_filter
123
- if j != len(and_filter_query) - 1:
123
+ if j != len(and_filters) - 1:
124
124
  and_filter_query += ' AND '
125
125
 
126
126
  invalid_at_filter += and_filter_query
127
127
 
128
- if i == len(or_list) - 1:
128
+ if i == len(filters.invalid_at) - 1:
129
129
  invalid_at_filter += ')'
130
130
  else:
131
131
  invalid_at_filter += ' OR '
@@ -145,12 +145,12 @@ def edge_search_filter_query_constructor(
145
145
  and_filter_query = ''
146
146
  for j, and_filter in enumerate(and_filters):
147
147
  and_filter_query += and_filter
148
- if j != len(and_filter_query) - 1:
148
+ if j != len(and_filters) - 1:
149
149
  and_filter_query += ' AND '
150
150
 
151
151
  created_at_filter += and_filter_query
152
152
 
153
- if i == len(or_list) - 1:
153
+ if i == len(filters.created_at) - 1:
154
154
  created_at_filter += ')'
155
155
  else:
156
156
  created_at_filter += ' OR '
@@ -170,12 +170,12 @@ def edge_search_filter_query_constructor(
170
170
  and_filter_query = ''
171
171
  for j, and_filter in enumerate(and_filters):
172
172
  and_filter_query += and_filter
173
- if j != len(and_filter_query) - 1:
173
+ if j != len(and_filters) - 1:
174
174
  and_filter_query += ' AND '
175
175
 
176
176
  expired_at_filter += and_filter_query
177
177
 
178
- if i == len(or_list) - 1:
178
+ if i == len(filters.expired_at) - 1:
179
179
  expired_at_filter += ')'
180
180
  else:
181
181
  expired_at_filter += ' OR '
@@ -92,8 +92,6 @@ async def extract_edges(
92
92
  extract_edges_max_tokens = 16384
93
93
  llm_client = clients.llm_client
94
94
 
95
- node_uuids_by_name_map = {node.name: node.uuid for node in nodes}
96
-
97
95
  edge_types_context = (
98
96
  [
99
97
  {
@@ -109,7 +107,7 @@ async def extract_edges(
109
107
  # Prepare context for LLM
110
108
  context = {
111
109
  'episode_content': episode.content,
112
- 'nodes': [node.name for node in nodes],
110
+ 'nodes': [{'id': idx, 'name': node.name} for idx, node in enumerate(nodes)],
113
111
  'previous_episodes': [ep.content for ep in previous_episodes],
114
112
  'reference_time': episode.valid_at,
115
113
  'edge_types': edge_types_context,
@@ -160,14 +158,16 @@ async def extract_edges(
160
158
  invalid_at = edge_data.get('invalid_at', None)
161
159
  valid_at_datetime = None
162
160
  invalid_at_datetime = None
163
- source_node_uuid = node_uuids_by_name_map.get(edge_data.get('source_entity_name', ''), '')
164
- target_node_uuid = node_uuids_by_name_map.get(edge_data.get('target_entity_name', ''), '')
165
161
 
166
- if source_node_uuid == '' or target_node_uuid == '':
162
+ source_node_idx = edge_data.get('source_entity_id', -1)
163
+ target_node_idx = edge_data.get('target_entity_id', -1)
164
+ if not (-1 < source_node_idx < len(nodes) and -1 < target_node_idx < len(nodes)):
167
165
  logger.warning(
168
- f'WARNING: source or target node not filled {edge_data.get("edge_name")}. source_node_uuid: {source_node_uuid} and target_node_uuid: {target_node_uuid} '
166
+ f'WARNING: source or target node not filled {edge_data.get("edge_name")}. source_node_uuid: {source_node_idx} and target_node_uuid: {target_node_idx} '
169
167
  )
170
168
  continue
169
+ source_node_uuid = nodes[source_node_idx].uuid
170
+ target_node_uuid = nodes[edge_data.get('target_entity_id')].uuid
171
171
 
172
172
  if valid_at:
173
173
  try:
@@ -29,7 +29,7 @@ from graphiti_core.llm_client import LLMClient
29
29
  from graphiti_core.llm_client.config import ModelSize
30
30
  from graphiti_core.nodes import EntityNode, EpisodeType, EpisodicNode, create_entity_node_embeddings
31
31
  from graphiti_core.prompts import prompt_library
32
- from graphiti_core.prompts.dedupe_nodes import NodeDuplicate, NodeResolutions
32
+ from graphiti_core.prompts.dedupe_nodes import NodeResolutions
33
33
  from graphiti_core.prompts.extract_nodes import (
34
34
  ExtractedEntities,
35
35
  ExtractedEntity,
@@ -241,7 +241,25 @@ async def resolve_extracted_nodes(
241
241
  ]
242
242
  )
243
243
 
244
- existing_nodes_lists: list[list[EntityNode]] = [result.nodes for result in search_results]
244
+ existing_nodes_dict: dict[str, EntityNode] = {
245
+ node.uuid: node for result in search_results for node in result.nodes
246
+ }
247
+
248
+ existing_nodes: list[EntityNode] = list(existing_nodes_dict.values())
249
+
250
+ existing_nodes_context = (
251
+ [
252
+ {
253
+ **{
254
+ 'idx': i,
255
+ 'name': candidate.name,
256
+ 'entity_types': candidate.labels,
257
+ },
258
+ **candidate.attributes,
259
+ }
260
+ for i, candidate in enumerate(existing_nodes)
261
+ ],
262
+ )
245
263
 
246
264
  entity_types_dict: dict[str, BaseModel] = entity_types if entity_types is not None else {}
247
265
 
@@ -255,23 +273,13 @@ async def resolve_extracted_nodes(
255
273
  next((item for item in node.labels if item != 'Entity'), '')
256
274
  ).__doc__
257
275
  or 'Default Entity Type',
258
- 'duplication_candidates': [
259
- {
260
- **{
261
- 'idx': j,
262
- 'name': candidate.name,
263
- 'entity_types': candidate.labels,
264
- },
265
- **candidate.attributes,
266
- }
267
- for j, candidate in enumerate(existing_nodes_lists[i])
268
- ],
269
276
  }
270
277
  for i, node in enumerate(extracted_nodes)
271
278
  ]
272
279
 
273
280
  context = {
274
281
  'extracted_nodes': extracted_nodes_context,
282
+ 'existing_nodes': existing_nodes_context,
275
283
  'episode_content': episode.content if episode is not None else '',
276
284
  'previous_episodes': [ep.content for ep in previous_episodes]
277
285
  if previous_episodes is not None
@@ -294,8 +302,8 @@ async def resolve_extracted_nodes(
294
302
  extracted_node = extracted_nodes[resolution_id]
295
303
 
296
304
  resolved_node = (
297
- existing_nodes_lists[resolution_id][duplicate_idx]
298
- if 0 <= duplicate_idx < len(existing_nodes_lists[resolution_id])
305
+ existing_nodes[duplicate_idx]
306
+ if 0 <= duplicate_idx < len(existing_nodes)
299
307
  else extracted_node
300
308
  )
301
309
 
@@ -309,70 +317,6 @@ async def resolve_extracted_nodes(
309
317
  return resolved_nodes, uuid_map
310
318
 
311
319
 
312
- async def resolve_extracted_node(
313
- llm_client: LLMClient,
314
- extracted_node: EntityNode,
315
- existing_nodes: list[EntityNode],
316
- episode: EpisodicNode | None = None,
317
- previous_episodes: list[EpisodicNode] | None = None,
318
- entity_type: BaseModel | None = None,
319
- ) -> EntityNode:
320
- start = time()
321
- if len(existing_nodes) == 0:
322
- return extracted_node
323
-
324
- # Prepare context for LLM
325
- existing_nodes_context = [
326
- {
327
- **{
328
- 'id': i,
329
- 'name': node.name,
330
- 'entity_types': node.labels,
331
- },
332
- **node.attributes,
333
- }
334
- for i, node in enumerate(existing_nodes)
335
- ]
336
-
337
- extracted_node_context = {
338
- 'name': extracted_node.name,
339
- 'entity_type': entity_type.__name__ if entity_type is not None else 'Entity', # type: ignore
340
- }
341
-
342
- context = {
343
- 'existing_nodes': existing_nodes_context,
344
- 'extracted_node': extracted_node_context,
345
- 'entity_type_description': entity_type.__doc__
346
- if entity_type is not None
347
- else 'Default Entity Type',
348
- 'episode_content': episode.content if episode is not None else '',
349
- 'previous_episodes': [ep.content for ep in previous_episodes]
350
- if previous_episodes is not None
351
- else [],
352
- }
353
-
354
- llm_response = await llm_client.generate_response(
355
- prompt_library.dedupe_nodes.node(context),
356
- response_model=NodeDuplicate,
357
- model_size=ModelSize.small,
358
- )
359
-
360
- duplicate_id: int = llm_response.get('duplicate_node_id', -1)
361
-
362
- node = (
363
- existing_nodes[duplicate_id] if 0 <= duplicate_id < len(existing_nodes) else extracted_node
364
- )
365
-
366
- node.name = llm_response.get('name', '')
367
-
368
- end = time()
369
- logger.debug(
370
- f'Resolved node: {extracted_node.name} is {node.name}, in {(end - start) * 1000} ms'
371
- )
372
-
373
- return node
374
-
375
-
376
320
  async def extract_attributes_from_nodes(
377
321
  clients: GraphitiClients,
378
322
  nodes: list[EntityNode],
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.3
2
2
  Name: graphiti-core
3
- Version: 0.12.0rc3
3
+ Version: 0.12.0rc5
4
4
  Summary: A temporal graph building library
5
5
  License: Apache-2.0
6
6
  Author: Paul Paliychuk
@@ -22,7 +22,7 @@ Requires-Dist: groq (>=0.2.0) ; extra == "groq"
22
22
  Requires-Dist: neo4j (>=5.23.0)
23
23
  Requires-Dist: numpy (>=1.0.0)
24
24
  Requires-Dist: openai (>=1.53.0)
25
- Requires-Dist: pydantic (>=2.8.2)
25
+ Requires-Dist: pydantic (>=2.11.5)
26
26
  Requires-Dist: python-dotenv (>=1.0.1)
27
27
  Requires-Dist: tenacity (>=9.0.0)
28
28
  Project-URL: Homepage, https://help.getzep.com/graphiti/graphiti/overview
@@ -236,7 +236,7 @@ Graphiti supports Azure OpenAI for both LLM inference and embeddings. To use Azu
236
236
  ```python
237
237
  from openai import AsyncAzureOpenAI
238
238
  from graphiti_core import Graphiti
239
- from graphiti_core.llm_client import OpenAIClient
239
+ from graphiti_core.llm_client import LLMConfig, OpenAIClient
240
240
  from graphiti_core.embedder.openai import OpenAIEmbedder, OpenAIEmbedderConfig
241
241
  from graphiti_core.cross_encoder.openai_reranker_client import OpenAIRerankerClient
242
242
 
@@ -252,12 +252,19 @@ azure_openai_client = AsyncAzureOpenAI(
252
252
  azure_endpoint=azure_endpoint
253
253
  )
254
254
 
255
+ # Create LLM Config with your Azure deployed model names
256
+ azure_llm_config = LLMConfig(
257
+ small_model="gpt-4.1-nano",
258
+ model="gpt-4.1-mini",
259
+ )
260
+
255
261
  # Initialize Graphiti with Azure OpenAI clients
256
262
  graphiti = Graphiti(
257
263
  "bolt://localhost:7687",
258
264
  "neo4j",
259
265
  "password",
260
266
  llm_client=OpenAIClient(
267
+ llm_config=azure_llm_config,
261
268
  client=azure_openai_client
262
269
  ),
263
270
  embedder=OpenAIEmbedder(
@@ -268,6 +275,7 @@ graphiti = Graphiti(
268
275
  ),
269
276
  # Optional: Configure the OpenAI cross encoder with Azure OpenAI
270
277
  cross_encoder=OpenAIRerankerClient(
278
+ llm_config=azure_llm_config,
271
279
  client=azure_openai_client
272
280
  )
273
281
  )
@@ -31,10 +31,10 @@ graphiti_core/models/nodes/node_db_queries.py,sha256=AQgRGVO-GgFWfLq1G6k8s86WItw
31
31
  graphiti_core/nodes.py,sha256=U19DZ0MIi8GfEsx8D-Jgl8c2SGXO8QovVQpYy6FmUpo,18542
32
32
  graphiti_core/prompts/__init__.py,sha256=EA-x9xUki9l8wnu2l8ek_oNf75-do5tq5hVq7Zbv8Kw,101
33
33
  graphiti_core/prompts/dedupe_edges.py,sha256=AFVC1EQ0TvNkSp0G7QZmIh3YpGg9FVXo1_sT3TlRqA8,5473
34
- graphiti_core/prompts/dedupe_nodes.py,sha256=hUdlEUaYUJGLeX6Usy_hfF7fVkaZW-Qhuq5hYrgQ2ZM,7298
34
+ graphiti_core/prompts/dedupe_nodes.py,sha256=OIhMkKexRpQQ0dEr4NW_WE1ta7wLO3RibJA7Ge41uDg,7407
35
35
  graphiti_core/prompts/eval.py,sha256=gnBQTmwsCl3Qvwpcm7aieVszzo6y1sMCUT8jQiKTvvE,5317
36
36
  graphiti_core/prompts/extract_edge_dates.py,sha256=3Drs3CmvP0gJN5BidWSxrNvLet3HPoTybU3BUIAoc0Y,4218
37
- graphiti_core/prompts/extract_edges.py,sha256=i7fXBVZ_FH_sAP413T8D02yylIEIia7scaTOuc3dkwY,6497
37
+ graphiti_core/prompts/extract_edges.py,sha256=9NdxAKyXHiFOSuyAzzxRM38BmqtynGEbtmMUr3VTrtM,6513
38
38
  graphiti_core/prompts/extract_nodes.py,sha256=EYuX99P8ly7pSOBz87ZA9fJF8V6g6epbVj5Cq0YM8h8,9624
39
39
  graphiti_core/prompts/invalidate_edges.py,sha256=yfpcs_pyctnoM77ULPZXEtKW0oHr1MeLsJzC5yrE-o4,3547
40
40
  graphiti_core/prompts/lib.py,sha256=DCyHePM4_q-CptTpEXGO_dBv9k7xDtclEaB1dGu7EcI,4092
@@ -46,7 +46,7 @@ graphiti_core/search/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hS
46
46
  graphiti_core/search/search.py,sha256=XCEYz4-I341eWiZ-czeFlH5hdbHTTLymhHiD153p6DQ,15122
47
47
  graphiti_core/search/search_config.py,sha256=VvKg6AB_RPhoe56DBBXHRBXHThAVJ_OLFCyq_yKof-A,3765
48
48
  graphiti_core/search/search_config_recipes.py,sha256=4GquRphHhJlpXQhAZOySYnCzBWYoTwxlJj44eTOavZQ,7443
49
- graphiti_core/search/search_filters.py,sha256=hT35bKsFyPwocB-8EH2vh6jNev_AXvtdq-l2egdoZbk,6444
49
+ graphiti_core/search/search_filters.py,sha256=jG30nMWX03xoT9ohgyHNu_Xes8GwjIF2eTv6QaiWMqw,6466
50
50
  graphiti_core/search/search_helpers.py,sha256=G5Ceaq5Pfgx0Weelqgeylp_pUHwiBnINaUYsDbURJbE,2636
51
51
  graphiti_core/search/search_utils.py,sha256=AimBkRgvSFHqAkt1vraTVj_bVAp3JKrR6JUMpoZa8RI,34469
52
52
  graphiti_core/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -54,13 +54,13 @@ graphiti_core/utils/bulk_utils.py,sha256=WFCfo_OrFD2bpm13Vkex4A1YLVHX4pjCm5acZ1C
54
54
  graphiti_core/utils/datetime_utils.py,sha256=Ti-2tnrDFRzBsbfblzsHybsM3jaDLP4-VT2t0VhpIzU,1357
55
55
  graphiti_core/utils/maintenance/__init__.py,sha256=vW4H1KyapTl-OOz578uZABYcpND4wPx3Vt6aAPaXh78,301
56
56
  graphiti_core/utils/maintenance/community_operations.py,sha256=TF-4eHuvMe_jMqvWg3swxK80zLLtOR0t1pmUUQlNulM,10067
57
- graphiti_core/utils/maintenance/edge_operations.py,sha256=9_vC3piLUlGM-C30Z4DsN6UWQoxbabsSlJYD7z1zsr4,19222
57
+ graphiti_core/utils/maintenance/edge_operations.py,sha256=5Sq6HgP_s3rutbWov8wK3buEQEL8MDszVosztfmYmcI,19273
58
58
  graphiti_core/utils/maintenance/graph_data_operations.py,sha256=BIJKc8tbvU4IjWxLgeotw57b1eE3Iw8YtV74j6eo4RQ,7493
59
- graphiti_core/utils/maintenance/node_operations.py,sha256=xuXKY0aoe_Idl9Edtb8FxSqoCa45M043nCMraJuAcW8,16606
59
+ graphiti_core/utils/maintenance/node_operations.py,sha256=uChQtyIgBjl9L5nxlO9gYMAeJ8PqU08a63cmiGt36Ss,14796
60
60
  graphiti_core/utils/maintenance/temporal_operations.py,sha256=mJkw9xLB4W2BsLfC5POr0r-PHWL9SIfNj_l_xu0B5ug,3410
61
61
  graphiti_core/utils/maintenance/utils.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
62
62
  graphiti_core/utils/ontology_utils/entity_types_utils.py,sha256=QJX5cG0GSSNF_Mm_yrldr69wjVAbN_MxLhOSznz85Hk,1279
63
- graphiti_core-0.12.0rc3.dist-info/LICENSE,sha256=KCUwCyDXuVEgmDWkozHyniRyWjnWUWjkuDHfU6o3JlA,11325
64
- graphiti_core-0.12.0rc3.dist-info/METADATA,sha256=UpVXSKHsEuMupOCGVD6jAuknoTyuhMB9pQEfv0okJ3g,15301
65
- graphiti_core-0.12.0rc3.dist-info/WHEEL,sha256=fGIA9gx4Qxk2KDKeNJCbOEwSrmLtjWCwzBz351GyrPQ,88
66
- graphiti_core-0.12.0rc3.dist-info/RECORD,,
63
+ graphiti_core-0.12.0rc5.dist-info/LICENSE,sha256=KCUwCyDXuVEgmDWkozHyniRyWjnWUWjkuDHfU6o3JlA,11325
64
+ graphiti_core-0.12.0rc5.dist-info/METADATA,sha256=K7Hw4oCdcN-1ofwSojPFlBi19P7CILK-IyN4eatafmw,15535
65
+ graphiti_core-0.12.0rc5.dist-info/WHEEL,sha256=fGIA9gx4Qxk2KDKeNJCbOEwSrmLtjWCwzBz351GyrPQ,88
66
+ graphiti_core-0.12.0rc5.dist-info/RECORD,,