graphiti-core 0.9.2__tar.gz → 0.9.4__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of graphiti-core might be problematic. Click here for more details.

Files changed (65)
  1. {graphiti_core-0.9.2 → graphiti_core-0.9.4}/PKG-INFO +25 -11
  2. {graphiti_core-0.9.2 → graphiti_core-0.9.4}/README.md +10 -2
  3. {graphiti_core-0.9.2 → graphiti_core-0.9.4}/graphiti_core/cross_encoder/bge_reranker_client.py +1 -1
  4. {graphiti_core-0.9.2 → graphiti_core-0.9.4}/graphiti_core/cross_encoder/openai_reranker_client.py +1 -1
  5. {graphiti_core-0.9.2 → graphiti_core-0.9.4}/graphiti_core/embedder/openai.py +1 -2
  6. {graphiti_core-0.9.2 → graphiti_core-0.9.4}/graphiti_core/embedder/voyage.py +1 -1
  7. {graphiti_core-0.9.2 → graphiti_core-0.9.4}/graphiti_core/graphiti.py +1 -1
  8. {graphiti_core-0.9.2 → graphiti_core-0.9.4}/graphiti_core/llm_client/client.py +1 -1
  9. {graphiti_core-0.9.2 → graphiti_core-0.9.4}/graphiti_core/prompts/dedupe_edges.py +2 -2
  10. {graphiti_core-0.9.2 → graphiti_core-0.9.4}/graphiti_core/prompts/dedupe_nodes.py +3 -3
  11. {graphiti_core-0.9.2 → graphiti_core-0.9.4}/graphiti_core/prompts/extract_edge_dates.py +4 -4
  12. {graphiti_core-0.9.2 → graphiti_core-0.9.4}/graphiti_core/prompts/extract_edges.py +5 -5
  13. {graphiti_core-0.9.2 → graphiti_core-0.9.4}/graphiti_core/prompts/extract_nodes.py +7 -7
  14. {graphiti_core-0.9.2 → graphiti_core-0.9.4}/graphiti_core/prompts/models.py +2 -1
  15. {graphiti_core-0.9.2 → graphiti_core-0.9.4}/graphiti_core/utils/maintenance/community_operations.py +1 -1
  16. {graphiti_core-0.9.2 → graphiti_core-0.9.4}/graphiti_core/utils/maintenance/edge_operations.py +1 -1
  17. {graphiti_core-0.9.2 → graphiti_core-0.9.4}/graphiti_core/utils/maintenance/node_operations.py +3 -1
  18. graphiti_core-0.9.4/pyproject.toml +86 -0
  19. graphiti_core-0.9.2/pyproject.toml +0 -80
  20. {graphiti_core-0.9.2 → graphiti_core-0.9.4}/LICENSE +0 -0
  21. {graphiti_core-0.9.2 → graphiti_core-0.9.4}/graphiti_core/__init__.py +0 -0
  22. {graphiti_core-0.9.2 → graphiti_core-0.9.4}/graphiti_core/cross_encoder/__init__.py +0 -0
  23. {graphiti_core-0.9.2 → graphiti_core-0.9.4}/graphiti_core/cross_encoder/client.py +0 -0
  24. {graphiti_core-0.9.2 → graphiti_core-0.9.4}/graphiti_core/edges.py +0 -0
  25. {graphiti_core-0.9.2 → graphiti_core-0.9.4}/graphiti_core/embedder/__init__.py +0 -0
  26. {graphiti_core-0.9.2 → graphiti_core-0.9.4}/graphiti_core/embedder/client.py +0 -0
  27. {graphiti_core-0.9.2 → graphiti_core-0.9.4}/graphiti_core/embedder/gemini.py +0 -0
  28. {graphiti_core-0.9.2 → graphiti_core-0.9.4}/graphiti_core/errors.py +0 -0
  29. {graphiti_core-0.9.2 → graphiti_core-0.9.4}/graphiti_core/helpers.py +0 -0
  30. {graphiti_core-0.9.2 → graphiti_core-0.9.4}/graphiti_core/llm_client/__init__.py +0 -0
  31. {graphiti_core-0.9.2 → graphiti_core-0.9.4}/graphiti_core/llm_client/anthropic_client.py +0 -0
  32. {graphiti_core-0.9.2 → graphiti_core-0.9.4}/graphiti_core/llm_client/config.py +0 -0
  33. {graphiti_core-0.9.2 → graphiti_core-0.9.4}/graphiti_core/llm_client/errors.py +0 -0
  34. {graphiti_core-0.9.2 → graphiti_core-0.9.4}/graphiti_core/llm_client/gemini_client.py +0 -0
  35. {graphiti_core-0.9.2 → graphiti_core-0.9.4}/graphiti_core/llm_client/groq_client.py +0 -0
  36. {graphiti_core-0.9.2 → graphiti_core-0.9.4}/graphiti_core/llm_client/openai_client.py +0 -0
  37. {graphiti_core-0.9.2 → graphiti_core-0.9.4}/graphiti_core/llm_client/openai_generic_client.py +0 -0
  38. {graphiti_core-0.9.2 → graphiti_core-0.9.4}/graphiti_core/llm_client/utils.py +0 -0
  39. {graphiti_core-0.9.2 → graphiti_core-0.9.4}/graphiti_core/models/__init__.py +0 -0
  40. {graphiti_core-0.9.2 → graphiti_core-0.9.4}/graphiti_core/models/edges/__init__.py +0 -0
  41. {graphiti_core-0.9.2 → graphiti_core-0.9.4}/graphiti_core/models/edges/edge_db_queries.py +0 -0
  42. {graphiti_core-0.9.2 → graphiti_core-0.9.4}/graphiti_core/models/nodes/__init__.py +0 -0
  43. {graphiti_core-0.9.2 → graphiti_core-0.9.4}/graphiti_core/models/nodes/node_db_queries.py +0 -0
  44. {graphiti_core-0.9.2 → graphiti_core-0.9.4}/graphiti_core/nodes.py +0 -0
  45. {graphiti_core-0.9.2 → graphiti_core-0.9.4}/graphiti_core/prompts/__init__.py +0 -0
  46. {graphiti_core-0.9.2 → graphiti_core-0.9.4}/graphiti_core/prompts/eval.py +0 -0
  47. {graphiti_core-0.9.2 → graphiti_core-0.9.4}/graphiti_core/prompts/invalidate_edges.py +0 -0
  48. {graphiti_core-0.9.2 → graphiti_core-0.9.4}/graphiti_core/prompts/lib.py +0 -0
  49. {graphiti_core-0.9.2 → graphiti_core-0.9.4}/graphiti_core/prompts/prompt_helpers.py +0 -0
  50. {graphiti_core-0.9.2 → graphiti_core-0.9.4}/graphiti_core/prompts/summarize_nodes.py +0 -0
  51. {graphiti_core-0.9.2 → graphiti_core-0.9.4}/graphiti_core/py.typed +0 -0
  52. {graphiti_core-0.9.2 → graphiti_core-0.9.4}/graphiti_core/search/__init__.py +0 -0
  53. {graphiti_core-0.9.2 → graphiti_core-0.9.4}/graphiti_core/search/search.py +0 -0
  54. {graphiti_core-0.9.2 → graphiti_core-0.9.4}/graphiti_core/search/search_config.py +0 -0
  55. {graphiti_core-0.9.2 → graphiti_core-0.9.4}/graphiti_core/search/search_config_recipes.py +0 -0
  56. {graphiti_core-0.9.2 → graphiti_core-0.9.4}/graphiti_core/search/search_filters.py +0 -0
  57. {graphiti_core-0.9.2 → graphiti_core-0.9.4}/graphiti_core/search/search_utils.py +0 -0
  58. {graphiti_core-0.9.2 → graphiti_core-0.9.4}/graphiti_core/utils/__init__.py +0 -0
  59. {graphiti_core-0.9.2 → graphiti_core-0.9.4}/graphiti_core/utils/bulk_utils.py +0 -0
  60. {graphiti_core-0.9.2 → graphiti_core-0.9.4}/graphiti_core/utils/datetime_utils.py +0 -0
  61. {graphiti_core-0.9.2 → graphiti_core-0.9.4}/graphiti_core/utils/maintenance/__init__.py +0 -0
  62. {graphiti_core-0.9.2 → graphiti_core-0.9.4}/graphiti_core/utils/maintenance/graph_data_operations.py +0 -0
  63. {graphiti_core-0.9.2 → graphiti_core-0.9.4}/graphiti_core/utils/maintenance/temporal_operations.py +0 -0
  64. {graphiti_core-0.9.2 → graphiti_core-0.9.4}/graphiti_core/utils/maintenance/utils.py +0 -0
  65. {graphiti_core-0.9.2 → graphiti_core-0.9.4}/graphiti_core/utils/ontology_utils/entity_types_utils.py +0 -0
@@ -1,26 +1,32 @@
1
- Metadata-Version: 2.1
1
+ Metadata-Version: 2.3
2
2
  Name: graphiti-core
3
- Version: 0.9.2
3
+ Version: 0.9.4
4
4
  Summary: A temporal graph building library
5
5
  License: Apache-2.0
6
6
  Author: Paul Paliychuk
7
7
  Author-email: paul@getzep.com
8
- Requires-Python: >=3.10,<4.0
8
+ Requires-Python: >=3.10,<4
9
9
  Classifier: License :: OSI Approved :: Apache Software License
10
10
  Classifier: Programming Language :: Python :: 3
11
11
  Classifier: Programming Language :: Python :: 3.10
12
12
  Classifier: Programming Language :: Python :: 3.11
13
13
  Classifier: Programming Language :: Python :: 3.12
14
+ Classifier: Programming Language :: Python :: 3.13
14
15
  Provides-Extra: anthropic
15
16
  Provides-Extra: google-genai
16
17
  Provides-Extra: groq
17
- Requires-Dist: diskcache (>=5.6.3,<6.0.0)
18
- Requires-Dist: neo4j (>=5.23.0,<6.0.0)
18
+ Requires-Dist: anthropic (>=0.49.0) ; extra == "anthropic"
19
+ Requires-Dist: diskcache (>=5.6.3)
20
+ Requires-Dist: google-genai (>=1.8.0) ; extra == "google-genai"
21
+ Requires-Dist: groq (>=0.2.0) ; extra == "groq"
22
+ Requires-Dist: neo4j (>=5.23.0)
19
23
  Requires-Dist: numpy (>=1.0.0)
20
- Requires-Dist: openai (>=1.53.0,<2.0.0)
21
- Requires-Dist: pydantic (>=2.8.2,<3.0.0)
22
- Requires-Dist: python-dotenv (>=1.0.1,<2.0.0)
23
- Requires-Dist: tenacity (==9.0.0)
24
+ Requires-Dist: openai (>=1.53.0)
25
+ Requires-Dist: pydantic (>=2.8.2)
26
+ Requires-Dist: python-dotenv (>=1.0.1)
27
+ Requires-Dist: tenacity (>=9.0.0)
28
+ Project-URL: Homepage, https://help.getzep.com/graphiti/graphiti/overview
29
+ Project-URL: Repository, https://github.com/getzep/graphiti
24
30
  Description-Content-Type: text/markdown
25
31
 
26
32
  <p align="center">
@@ -256,6 +262,16 @@ Make sure to replace the placeholder values with your actual Azure OpenAI creden
256
262
 
257
263
  Graphiti supports Google's Gemini models for both LLM inference and embeddings. To use Gemini, you'll need to configure both the LLM client and embedder with your Google API key.
258
264
 
265
+ Install Graphiti:
266
+
267
+ ```bash
268
+ poetry add "graphiti-core[google-genai]"
269
+
270
+ # or
271
+
272
+ uv add "graphiti-core[google-genai]"
273
+ ```
274
+
259
275
  ```python
260
276
  from graphiti_core import Graphiti
261
277
  from graphiti_core.llm_client.gemini_client import GeminiClient, LLMConfig
@@ -286,8 +302,6 @@ graphiti = Graphiti(
286
302
  # Now you can use Graphiti with Google Gemini
287
303
  ```
288
304
 
289
- Make sure to replace the placeholder value with your actual Google API key. You can find more details in the example file at `examples/gemini_example.py`.
290
-
291
305
  ## Documentation
292
306
 
293
307
  - [Guides and API documentation](https://help.getzep.com/graphiti).
@@ -231,6 +231,16 @@ Make sure to replace the placeholder values with your actual Azure OpenAI creden
231
231
 
232
232
  Graphiti supports Google's Gemini models for both LLM inference and embeddings. To use Gemini, you'll need to configure both the LLM client and embedder with your Google API key.
233
233
 
234
+ Install Graphiti:
235
+
236
+ ```bash
237
+ poetry add "graphiti-core[google-genai]"
238
+
239
+ # or
240
+
241
+ uv add "graphiti-core[google-genai]"
242
+ ```
243
+
234
244
  ```python
235
245
  from graphiti_core import Graphiti
236
246
  from graphiti_core.llm_client.gemini_client import GeminiClient, LLMConfig
@@ -261,8 +271,6 @@ graphiti = Graphiti(
261
271
  # Now you can use Graphiti with Google Gemini
262
272
  ```
263
273
 
264
- Make sure to replace the placeholder value with your actual Google API key. You can find more details in the example file at `examples/gemini_example.py`.
265
-
266
274
  ## Documentation
267
275
 
268
276
  - [Guides and API documentation](https://help.getzep.com/graphiti).
@@ -36,7 +36,7 @@ class BGERerankerClient(CrossEncoderClient):
36
36
  scores = await loop.run_in_executor(None, self.model.predict, input_pairs)
37
37
 
38
38
  ranked_passages = sorted(
39
- [(passage, float(score)) for passage, score in zip(passages, scores)],
39
+ [(passage, float(score)) for passage, score in zip(passages, scores, strict=False)],
40
40
  key=lambda x: x[1],
41
41
  reverse=True,
42
42
  )
@@ -111,7 +111,7 @@ class OpenAIRerankerClient(CrossEncoderClient):
111
111
  if bool(logprob.token):
112
112
  scores.append(logprob.logprob)
113
113
 
114
- results = [(passage, score) for passage, score in zip(passages, scores)]
114
+ results = [(passage, score) for passage, score in zip(passages, scores, strict=False)]
115
115
  results.sort(reverse=True, key=lambda x: x[1])
116
116
  return results
117
117
  except openai.RateLimitError as e:
@@ -15,7 +15,6 @@ limitations under the License.
15
15
  """
16
16
 
17
17
  from collections.abc import Iterable
18
- from typing import Union
19
18
 
20
19
  from openai import AsyncAzureOpenAI, AsyncOpenAI
21
20
  from openai.types import EmbeddingModel
@@ -41,7 +40,7 @@ class OpenAIEmbedder(EmbedderClient):
41
40
  def __init__(
42
41
  self,
43
42
  config: OpenAIEmbedderConfig | None = None,
44
- client: Union[AsyncOpenAI, AsyncAzureOpenAI, None] = None,
43
+ client: AsyncOpenAI | AsyncAzureOpenAI | None = None,
45
44
  ):
46
45
  if config is None:
47
46
  config = OpenAIEmbedderConfig()
@@ -55,4 +55,4 @@ class VoyageAIEmbedder(EmbedderClient):
55
55
  return []
56
56
 
57
57
  result = await self.client.embed(input_list, model=self.config.embedding_model)
58
- return result.embeddings[0][: self.config.embedding_dim]
58
+ return [float(x) for x in result.embeddings[0][: self.config.embedding_dim]]
@@ -451,7 +451,7 @@ class Graphiti:
451
451
  existing_edges_list: list[list[EntityEdge]] = [
452
452
  source_lst + target_lst
453
453
  for source_lst, target_lst in zip(
454
- existing_source_edges_list, existing_target_edges_list
454
+ existing_source_edges_list, existing_target_edges_list, strict=False
455
455
  )
456
456
  ]
457
457
 
@@ -36,7 +36,7 @@ logger = logging.getLogger(__name__)
36
36
 
37
37
 
38
38
  def is_server_or_retry_error(exception):
39
- if isinstance(exception, (RateLimitError, json.decoder.JSONDecodeError)):
39
+ if isinstance(exception, RateLimitError | json.decoder.JSONDecodeError):
40
40
  return True
41
41
 
42
42
  return (
@@ -15,7 +15,7 @@ limitations under the License.
15
15
  """
16
16
 
17
17
  import json
18
- from typing import Any, Optional, Protocol, TypedDict
18
+ from typing import Any, Protocol, TypedDict
19
19
 
20
20
  from pydantic import BaseModel, Field
21
21
 
@@ -24,7 +24,7 @@ from .models import Message, PromptFunction, PromptVersion
24
24
 
25
25
  class EdgeDuplicate(BaseModel):
26
26
  is_duplicate: bool = Field(..., description='true or false')
27
- uuid: Optional[str] = Field(
27
+ uuid: str | None = Field(
28
28
  None,
29
29
  description="uuid of the existing edge like '5d643020624c42fa9de13f97b1b3fa39' or null",
30
30
  )
@@ -15,7 +15,7 @@ limitations under the License.
15
15
  """
16
16
 
17
17
  import json
18
- from typing import Any, Optional, Protocol, TypedDict
18
+ from typing import Any, Protocol, TypedDict
19
19
 
20
20
  from pydantic import BaseModel, Field
21
21
 
@@ -24,7 +24,7 @@ from .models import Message, PromptFunction, PromptVersion
24
24
 
25
25
  class NodeDuplicate(BaseModel):
26
26
  is_duplicate: bool = Field(..., description='true or false')
27
- uuid: Optional[str] = Field(
27
+ uuid: str | None = Field(
28
28
  None,
29
29
  description="uuid of the existing node like '5d643020624c42fa9de13f97b1b3fa39' or null",
30
30
  )
@@ -57,7 +57,7 @@ def node(context: dict[str, Any]) -> list[Message]:
57
57
  {json.dumps([ep for ep in context['previous_episodes']], indent=2)}
58
58
  </PREVIOUS MESSAGES>
59
59
  <CURRENT MESSAGE>
60
- {context["episode_content"]}
60
+ {context['episode_content']}
61
61
  </CURRENT MESSAGE>
62
62
 
63
63
  <EXISTING NODES>
@@ -14,7 +14,7 @@ See the License for the specific language governing permissions and
14
14
  limitations under the License.
15
15
  """
16
16
 
17
- from typing import Any, Optional, Protocol, TypedDict
17
+ from typing import Any, Protocol, TypedDict
18
18
 
19
19
  from pydantic import BaseModel, Field
20
20
 
@@ -22,11 +22,11 @@ from .models import Message, PromptFunction, PromptVersion
22
22
 
23
23
 
24
24
  class EdgeDates(BaseModel):
25
- valid_at: Optional[str] = Field(
25
+ valid_at: str | None = Field(
26
26
  None,
27
27
  description='The date and time when the relationship described by the edge fact became true or was established. YYYY-MM-DDTHH:MM:SS.SSSSSSZ or null.',
28
28
  )
29
- invalid_at: Optional[str] = Field(
29
+ invalid_at: str | None = Field(
30
30
  None,
31
31
  description='The date and time when the relationship described by the edge fact stopped being true or ended. YYYY-MM-DDTHH:MM:SS.SSSSSSZ or null.',
32
32
  )
@@ -53,7 +53,7 @@ def v1(context: dict[str, Any]) -> list[Message]:
53
53
  {context['previous_episodes']}
54
54
  </PREVIOUS MESSAGES>
55
55
  <CURRENT MESSAGE>
56
- {context["current_episode"]}
56
+ {context['current_episode']}
57
57
  </CURRENT MESSAGE>
58
58
  <REFERENCE TIMESTAMP>
59
59
  {context['reference_timestamp']}
@@ -60,11 +60,11 @@ def edge(context: dict[str, Any]) -> list[Message]:
60
60
  {json.dumps([ep for ep in context['previous_episodes']], indent=2)}
61
61
  </PREVIOUS MESSAGES>
62
62
  <CURRENT MESSAGE>
63
- {context["episode_content"]}
63
+ {context['episode_content']}
64
64
  </CURRENT MESSAGE>
65
65
 
66
66
  <ENTITIES>
67
- {context["nodes"]}
67
+ {context['nodes']}
68
68
  </ENTITIES>
69
69
 
70
70
  {context['custom_prompt']}
@@ -90,15 +90,15 @@ def reflexion(context: dict[str, Any]) -> list[Message]:
90
90
  {json.dumps([ep for ep in context['previous_episodes']], indent=2)}
91
91
  </PREVIOUS MESSAGES>
92
92
  <CURRENT MESSAGE>
93
- {context["episode_content"]}
93
+ {context['episode_content']}
94
94
  </CURRENT MESSAGE>
95
95
 
96
96
  <EXTRACTED ENTITIES>
97
- {context["nodes"]}
97
+ {context['nodes']}
98
98
  </EXTRACTED ENTITIES>
99
99
 
100
100
  <EXTRACTED FACTS>
101
- {context["extracted_facts"]}
101
+ {context['extracted_facts']}
102
102
  </EXTRACTED FACTS>
103
103
 
104
104
  Given the above MESSAGES, list of EXTRACTED ENTITIES entities, and list of EXTRACTED FACTS;
@@ -68,7 +68,7 @@ def extract_message(context: dict[str, Any]) -> list[Message]:
68
68
  {json.dumps([ep for ep in context['previous_episodes']], indent=2)}
69
69
  </PREVIOUS MESSAGES>
70
70
  <CURRENT MESSAGE>
71
- {context["episode_content"]}
71
+ {context['episode_content']}
72
72
  </CURRENT MESSAGE>
73
73
 
74
74
  {context['custom_prompt']}
@@ -96,10 +96,10 @@ def extract_json(context: dict[str, Any]) -> list[Message]:
96
96
 
97
97
  user_prompt = f"""
98
98
  <SOURCE DESCRIPTION>:
99
- {context["source_description"]}
99
+ {context['source_description']}
100
100
  </SOURCE DESCRIPTION>
101
101
  <JSON>
102
- {context["episode_content"]}
102
+ {context['episode_content']}
103
103
  </JSON>
104
104
 
105
105
  {context['custom_prompt']}
@@ -121,7 +121,7 @@ def extract_text(context: dict[str, Any]) -> list[Message]:
121
121
 
122
122
  user_prompt = f"""
123
123
  <TEXT>
124
- {context["episode_content"]}
124
+ {context['episode_content']}
125
125
  </TEXT>
126
126
 
127
127
  {context['custom_prompt']}
@@ -148,11 +148,11 @@ def reflexion(context: dict[str, Any]) -> list[Message]:
148
148
  {json.dumps([ep for ep in context['previous_episodes']], indent=2)}
149
149
  </PREVIOUS MESSAGES>
150
150
  <CURRENT MESSAGE>
151
- {context["episode_content"]}
151
+ {context['episode_content']}
152
152
  </CURRENT MESSAGE>
153
153
 
154
154
  <EXTRACTED ENTITIES>
155
- {context["extracted_entities"]}
155
+ {context['extracted_entities']}
156
156
  </EXTRACTED ENTITIES>
157
157
 
158
158
  Given the above previous messages, current message, and list of extracted entities; determine if any entities haven't been
@@ -172,7 +172,7 @@ def classify_nodes(context: dict[str, Any]) -> list[Message]:
172
172
  {json.dumps([ep for ep in context['previous_episodes']], indent=2)}
173
173
  </PREVIOUS MESSAGES>
174
174
  <CURRENT MESSAGE>
175
- {context["episode_content"]}
175
+ {context['episode_content']}
176
176
  </CURRENT MESSAGE>
177
177
 
178
178
  <EXTRACTED ENTITIES>
@@ -14,7 +14,8 @@ See the License for the specific language governing permissions and
14
14
  limitations under the License.
15
15
  """
16
16
 
17
- from typing import Any, Callable, Protocol
17
+ from collections.abc import Callable
18
+ from typing import Any, Protocol
18
19
 
19
20
  from pydantic import BaseModel
20
21
 
@@ -164,7 +164,7 @@ async def build_community(
164
164
  *[
165
165
  summarize_pair(llm_client, (str(left_summary), str(right_summary)))
166
166
  for left_summary, right_summary in zip(
167
- summaries[: int(length / 2)], summaries[int(length / 2) :]
167
+ summaries[: int(length / 2)], summaries[int(length / 2) :], strict=False
168
168
  )
169
169
  ]
170
170
  )
@@ -213,7 +213,7 @@ async def resolve_extracted_edges(
213
213
  previous_episodes,
214
214
  )
215
215
  for extracted_edge, related_edges, existing_edges in zip(
216
- extracted_edges, related_edges_lists, existing_edges_lists
216
+ extracted_edges, related_edges_lists, existing_edges_lists, strict=False
217
217
  )
218
218
  ]
219
219
  )
@@ -279,7 +279,9 @@ async def resolve_extracted_nodes(
279
279
  previous_episodes,
280
280
  entity_types,
281
281
  )
282
- for extracted_node, existing_nodes in zip(extracted_nodes, existing_nodes_lists)
282
+ for extracted_node, existing_nodes in zip(
283
+ extracted_nodes, existing_nodes_lists, strict=False
284
+ )
283
285
  ]
284
286
  )
285
287
  )
@@ -0,0 +1,86 @@
1
+ [project]
2
+ name = "graphiti-core"
3
+ description = "A temporal graph building library"
4
+ version = "0.9.4"
5
+ authors = [
6
+ {"name" = "Paul Paliychuk", "email" = "paul@getzep.com"},
7
+ {"name" = "Preston Rasmussen", "email" = "preston@getzep.com"},
8
+ {"name" = "Daniel Chalef", "email" = "daniel@getzep.com"},
9
+ ]
10
+ readme = "README.md"
11
+ license = "Apache-2.0"
12
+ requires-python = ">=3.10,<4"
13
+ packages = [{ include = "graphiti_core", from = "." }]
14
+ dependencies = [
15
+ "pydantic>=2.8.2",
16
+ "neo4j>=5.23.0",
17
+ "diskcache>=5.6.3",
18
+ "openai>=1.53.0",
19
+ "tenacity>=9.0.0",
20
+ "numpy>=1.0.0",
21
+ "python-dotenv>=1.0.1",
22
+ ]
23
+
24
+ [project.urls]
25
+ Homepage = "https://help.getzep.com/graphiti/graphiti/overview"
26
+ Repository = "https://github.com/getzep/graphiti"
27
+
28
+ [project.optional-dependencies]
29
+ anthropic = ["anthropic>=0.49.0"]
30
+ groq = ["groq>=0.2.0"]
31
+ google-genai = ["google-genai>=1.8.0"]
32
+
33
+ [tool.poetry.group.dev.dependencies]
34
+ mypy = ">=1.11.1"
35
+ groq = ">=0.2.0"
36
+ anthropic = ">=0.49.0"
37
+ google-genai = ">=1.8.0"
38
+ ipykernel = ">=6.29.5"
39
+ jupyterlab = ">=4.2.4"
40
+ diskcache-stubs = ">=5.6.3.6.20240818"
41
+ langgraph = ">=0.2.15"
42
+ langchain-anthropic = ">=0.2.4"
43
+ langsmith = ">=0.1.108"
44
+ langchain-openai = ">=0.2.6"
45
+ sentence-transformers = ">=3.2.1"
46
+ transformers = ">=4.45.2"
47
+ voyageai = ">=0.2.3"
48
+ pytest = ">=8.3.3"
49
+ pytest-asyncio = ">=0.24.0"
50
+ pytest-xdist = ">=3.6.1"
51
+ ruff = ">=0.7.1"
52
+
53
+ [build-system]
54
+ requires = ["poetry-core"]
55
+ build-backend = "poetry.core.masonry.api"
56
+
57
+ [tool.poetry]
58
+ requires-poetry = ">=2.0"
59
+
60
+ [tool.pytest.ini_options]
61
+ pythonpath = ["."]
62
+
63
+ [tool.ruff]
64
+ line-length = 100
65
+
66
+ [tool.ruff.lint]
67
+ select = [
68
+ # pycodestyle
69
+ "E",
70
+ # Pyflakes
71
+ "F",
72
+ # pyupgrade
73
+ "UP",
74
+ # flake8-bugbear
75
+ "B",
76
+ # flake8-simplify
77
+ "SIM",
78
+ # isort
79
+ "I",
80
+ ]
81
+ ignore = ["E501"]
82
+
83
+ [tool.ruff.format]
84
+ quote-style = "single"
85
+ indent-style = "space"
86
+ docstring-code-format = true
@@ -1,80 +0,0 @@
1
- [tool.poetry]
2
- name = "graphiti-core"
3
- version = "0.9.2"
4
- description = "A temporal graph building library"
5
- authors = [
6
- "Paul Paliychuk <paul@getzep.com>",
7
- "Preston Rasmussen <preston@getzep.com>",
8
- "Daniel Chalef <daniel@getzep.com>",
9
- ]
10
- readme = "README.md"
11
- license = "Apache-2.0"
12
-
13
- packages = [{ include = "graphiti_core", from = "." }]
14
-
15
- [tool.poetry.dependencies]
16
- python = "^3.10"
17
- pydantic = "^2.8.2"
18
- neo4j = "^5.23.0"
19
- diskcache = "^5.6.3"
20
- openai = "^1.53.0"
21
- tenacity = "9.0.0"
22
- numpy = ">=1.0.0"
23
- python-dotenv = "^1.0.1"
24
-
25
- [tool.poetry.extras]
26
- anthropic = ["anthropic"]
27
- groq = ["groq"]
28
- google-genai = ["google-genai"]
29
-
30
- [tool.poetry.group.dev.dependencies]
31
- mypy = "^1.11.1"
32
- groq = ">=0.9,<0.12"
33
- anthropic = "~0.49.0"
34
- google-genai = ">=0.8.0"
35
- ipykernel = "^6.29.5"
36
- jupyterlab = "^4.2.4"
37
- diskcache-stubs = "^5.6.3.6.20240818"
38
- langgraph = "^0.2.15"
39
- langchain-anthropic = "^0.2.4"
40
- langsmith = "^0.1.108"
41
- langchain-openai = "^0.2.6"
42
- sentence-transformers = "^3.2.1"
43
- transformers = "^4.45.2"
44
- voyageai = "^0.2.3"
45
- pytest = "^8.3.3"
46
- pytest-asyncio = "^0.24.0"
47
- pytest-xdist = "^3.6.1"
48
- ruff = "^0.7.1"
49
-
50
- [build-system]
51
- requires = ["poetry-core"]
52
- build-backend = "poetry.core.masonry.api"
53
-
54
- [tool.pytest.ini_options]
55
- pythonpath = ["."]
56
-
57
- [tool.ruff]
58
- line-length = 100
59
-
60
- [tool.ruff.lint]
61
- select = [
62
- # pycodestyle
63
- "E",
64
- # Pyflakes
65
- "F",
66
- # pyupgrade
67
- "UP",
68
- # flake8-bugbear
69
- "B",
70
- # flake8-simplify
71
- "SIM",
72
- # isort
73
- "I",
74
- ]
75
- ignore = ["E501"]
76
-
77
- [tool.ruff.format]
78
- quote-style = "single"
79
- indent-style = "space"
80
- docstring-code-format = true
File without changes