graphiti-core 0.9.3__py3-none-any.whl → 0.9.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of graphiti-core might be problematic.

@@ -55,4 +55,4 @@ class VoyageAIEmbedder(EmbedderClient):
             return []
 
         result = await self.client.embed(input_list, model=self.config.embedding_model)
-        return result.embeddings[0][: self.config.embedding_dim]
+        return [float(x) for x in result.embeddings[0][: self.config.embedding_dim]]
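The change above coerces every element of the returned vector to a built-in Python float before truncating it to the configured dimension, presumably so callers always get plain, serializable floats regardless of the numeric type the Voyage client hands back. A minimal sketch of the before/after behavior, using placeholder values rather than a real API response:

```python
# Placeholder stand-ins for result.embeddings and self.config.embedding_dim.
raw_embeddings = [[0.12, -0.04, 0.33, 0.51]]  # hypothetical response data
embedding_dim = 3

# 0.9.3 returned the raw slice as-is; 0.9.4 coerces every element to float.
old_style = raw_embeddings[0][:embedding_dim]
new_style = [float(x) for x in raw_embeddings[0][:embedding_dim]]

assert new_style == [0.12, -0.04, 0.33]
assert all(isinstance(x, float) for x in new_style)
```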
@@ -57,7 +57,7 @@ def node(context: dict[str, Any]) -> list[Message]:
         {json.dumps([ep for ep in context['previous_episodes']], indent=2)}
         </PREVIOUS MESSAGES>
         <CURRENT MESSAGE>
-        {context["episode_content"]}
+        {context['episode_content']}
         </CURRENT MESSAGE>
 
         <EXISTING NODES>
@@ -53,7 +53,7 @@ def v1(context: dict[str, Any]) -> list[Message]:
         {context['previous_episodes']}
         </PREVIOUS MESSAGES>
         <CURRENT MESSAGE>
-        {context["current_episode"]}
+        {context['current_episode']}
         </CURRENT MESSAGE>
         <REFERENCE TIMESTAMP>
         {context['reference_timestamp']}
@@ -60,11 +60,11 @@ def edge(context: dict[str, Any]) -> list[Message]:
         {json.dumps([ep for ep in context['previous_episodes']], indent=2)}
         </PREVIOUS MESSAGES>
         <CURRENT MESSAGE>
-        {context["episode_content"]}
+        {context['episode_content']}
         </CURRENT MESSAGE>
 
         <ENTITIES>
-        {context["nodes"]}
+        {context['nodes']}
         </ENTITIES>
 
         {context['custom_prompt']}
@@ -90,15 +90,15 @@ def reflexion(context: dict[str, Any]) -> list[Message]:
         {json.dumps([ep for ep in context['previous_episodes']], indent=2)}
         </PREVIOUS MESSAGES>
         <CURRENT MESSAGE>
-        {context["episode_content"]}
+        {context['episode_content']}
         </CURRENT MESSAGE>
 
         <EXTRACTED ENTITIES>
-        {context["nodes"]}
+        {context['nodes']}
         </EXTRACTED ENTITIES>
 
         <EXTRACTED FACTS>
-        {context["extracted_facts"]}
+        {context['extracted_facts']}
         </EXTRACTED FACTS>
 
         Given the above MESSAGES, list of EXTRACTED ENTITIES entities, and list of EXTRACTED FACTS;
@@ -68,7 +68,7 @@ def extract_message(context: dict[str, Any]) -> list[Message]:
         {json.dumps([ep for ep in context['previous_episodes']], indent=2)}
         </PREVIOUS MESSAGES>
         <CURRENT MESSAGE>
-        {context["episode_content"]}
+        {context['episode_content']}
         </CURRENT MESSAGE>
 
         {context['custom_prompt']}
@@ -96,10 +96,10 @@ def extract_json(context: dict[str, Any]) -> list[Message]:
 
     user_prompt = f"""
         <SOURCE DESCRIPTION>:
-        {context["source_description"]}
+        {context['source_description']}
         </SOURCE DESCRIPTION>
         <JSON>
-        {context["episode_content"]}
+        {context['episode_content']}
         </JSON>
 
         {context['custom_prompt']}
@@ -121,7 +121,7 @@ def extract_text(context: dict[str, Any]) -> list[Message]:
 
     user_prompt = f"""
         <TEXT>
-        {context["episode_content"]}
+        {context['episode_content']}
         </TEXT>
 
         {context['custom_prompt']}
@@ -148,11 +148,11 @@ def reflexion(context: dict[str, Any]) -> list[Message]:
         {json.dumps([ep for ep in context['previous_episodes']], indent=2)}
         </PREVIOUS MESSAGES>
         <CURRENT MESSAGE>
-        {context["episode_content"]}
+        {context['episode_content']}
         </CURRENT MESSAGE>
 
         <EXTRACTED ENTITIES>
-        {context["extracted_entities"]}
+        {context['extracted_entities']}
         </EXTRACTED ENTITIES>
 
         Given the above previous messages, current message, and list of extracted entities; determine if any entities haven't been
@@ -172,7 +172,7 @@ def classify_nodes(context: dict[str, Any]) -> list[Message]:
         {json.dumps([ep for ep in context['previous_episodes']], indent=2)}
         </PREVIOUS MESSAGES>
         <CURRENT MESSAGE>
-        {context["episode_content"]}
+        {context['episode_content']}
         </CURRENT MESSAGE>
 
         <EXTRACTED ENTITIES>
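The prompt-module hunks above all apply the same cosmetic change: dictionary lookups inside f-string expressions switch from double quotes to single quotes. The rendered prompt text is unchanged; a quick illustration with a hypothetical context dict:

```python
# Hypothetical context dict; both quoting styles interpolate identically.
context = {"episode_content": "Alice met Bob on Tuesday."}

before = f"""<CURRENT MESSAGE>
{context["episode_content"]}
</CURRENT MESSAGE>"""

after = f"""<CURRENT MESSAGE>
{context['episode_content']}
</CURRENT MESSAGE>"""

assert before == after  # only the source quoting differs, not the output
```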
@@ -1,11 +1,11 @@
 Metadata-Version: 2.3
 Name: graphiti-core
-Version: 0.9.3
+Version: 0.9.4
 Summary: A temporal graph building library
 License: Apache-2.0
 Author: Paul Paliychuk
 Author-email: paul@getzep.com
-Requires-Python: >=3.10
+Requires-Python: >=3.10,<4
 Classifier: License :: OSI Approved :: Apache Software License
 Classifier: Programming Language :: Python :: 3
 Classifier: Programming Language :: Python :: 3.10
@@ -262,6 +262,16 @@ Make sure to replace the placeholder values with your actual Azure OpenAI creden
 
 Graphiti supports Google's Gemini models for both LLM inference and embeddings. To use Gemini, you'll need to configure both the LLM client and embedder with your Google API key.
 
+Install Graphiti:
+
+```bash
+poetry add "graphiti-core[google-genai]"
+
+# or
+
+uv add "graphiti-core[google-genai]"
+```
+
 ```python
 from graphiti_core import Graphiti
 from graphiti_core.llm_client.gemini_client import GeminiClient, LLMConfig
@@ -8,7 +8,7 @@ graphiti_core/embedder/__init__.py,sha256=EL564ZuE-DZjcuKNUK_exMn_XHXm2LdO9fzdXe
 graphiti_core/embedder/client.py,sha256=HKIlpPLnzFT81jurPkry6z8F8nxfZVfejdcfxHVUSFU,995
 graphiti_core/embedder/gemini.py,sha256=nE0XH8wYVGcPSO7DaNQ7kdsQLFSoH4FQOu2HMQUy2ss,2200
 graphiti_core/embedder/openai.py,sha256=fcU63koSRI-OjDuEcBfUKgXu8XV_-8EF6HpVrYa1_8I,1880
-graphiti_core/embedder/voyage.py,sha256=7kqrLG75J3Q6cdA2Nlx1JSYtpk2141ckdl3OtDDw0vU,1882
+graphiti_core/embedder/voyage.py,sha256=DZsH1nSTfP1vqCinNIIwSyEzv7jsyur2tKxlBv-ZZ_E,1902
 graphiti_core/errors.py,sha256=Nib1uQx2cO_VOizupmRjpFfmuRg-hFAVqTtZAuBehR8,2405
 graphiti_core/graphiti.py,sha256=Jztk1PGsr15FkJhf-mFYgFGF40tTMQHPUq96kdWz6c8,29726
 graphiti_core/helpers.py,sha256=7BQzUBFmoBDA2OIDdFtoN4W-vXOhPRIsF0uDb7PsNi0,2913
@@ -30,11 +30,11 @@ graphiti_core/models/nodes/node_db_queries.py,sha256=AQgRGVO-GgFWfLq1G6k8s86WItw
 graphiti_core/nodes.py,sha256=L_sAXuS-Hbj1I_kmln_y3dBiu_UDMKu64oWTB3ecFss,16938
 graphiti_core/prompts/__init__.py,sha256=EA-x9xUki9l8wnu2l8ek_oNf75-do5tq5hVq7Zbv8Kw,101
 graphiti_core/prompts/dedupe_edges.py,sha256=GrLKEHPrEsjK31wQf7AnMJDXaVCrCuJWaNlVAxEd4ks,3543
-graphiti_core/prompts/dedupe_nodes.py,sha256=YTJPsbeldHDVbBG1p9JTRZMKQxXa72EZl44YUL9gkbs,4632
+graphiti_core/prompts/dedupe_nodes.py,sha256=VXSb3chBo4l7qzzWxT-sNHJhjY3jNtUBAndnALUcqws,4632
 graphiti_core/prompts/eval.py,sha256=csW494kKBMvWSm2SYLIRuGgNghhwNR3YwGn3veo3g_Y,3691
-graphiti_core/prompts/extract_edge_dates.py,sha256=hcQ2rUyrshExIlTWxg8RNeso3FOPRlKZ27_TcqcXDh8,4080
-graphiti_core/prompts/extract_edges.py,sha256=vyEdW7JAPOT_eLWUi6nRmxbvucyVoyoYX2SxXfknRUg,3467
-graphiti_core/prompts/extract_nodes.py,sha256=fSi07hGnlo7L5t9c0Q5zcBxDSki7QQKWhex9DFsVWW8,7048
+graphiti_core/prompts/extract_edge_dates.py,sha256=wBM80uGIpUuDQs-5nwSieu3JcEg8PzKVrAFx08qUH80,4080
+graphiti_core/prompts/extract_edges.py,sha256=6-MkMkQp4QSpbdZ-gB3PkMcs7q3E07dAWYM5oY1FZdQ,3467
+graphiti_core/prompts/extract_nodes.py,sha256=I8CO8D7rMaKQv2biBxToYrAMD1RNCe1RMG9tGNv1Tm4,7048
 graphiti_core/prompts/invalidate_edges.py,sha256=DV2mEyIhhjc0hdKEMFLQMeG0FiUCkv_X0ctCliYjQ2c,3577
 graphiti_core/prompts/lib.py,sha256=DCyHePM4_q-CptTpEXGO_dBv9k7xDtclEaB1dGu7EcI,4092
 graphiti_core/prompts/models.py,sha256=NgxdbPHJpBEcpbXovKyScgpBc73Q-GIW-CBDlBtDjto,894
@@ -58,7 +58,7 @@ graphiti_core/utils/maintenance/node_operations.py,sha256=WhZQixx05dAFFQAd5KTXJ8
 graphiti_core/utils/maintenance/temporal_operations.py,sha256=RdNtubCyYhOVrvcOIq2WppHls1Q-BEjtsN8r38l-Rtc,3691
 graphiti_core/utils/maintenance/utils.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 graphiti_core/utils/ontology_utils/entity_types_utils.py,sha256=QJX5cG0GSSNF_Mm_yrldr69wjVAbN_MxLhOSznz85Hk,1279
-graphiti_core-0.9.3.dist-info/LICENSE,sha256=KCUwCyDXuVEgmDWkozHyniRyWjnWUWjkuDHfU6o3JlA,11325
-graphiti_core-0.9.3.dist-info/METADATA,sha256=Aa5QrNkSRsXnuYShlNnCi3L83ySgNABW4QgCZQ2coG4,14224
-graphiti_core-0.9.3.dist-info/WHEEL,sha256=fGIA9gx4Qxk2KDKeNJCbOEwSrmLtjWCwzBz351GyrPQ,88
-graphiti_core-0.9.3.dist-info/RECORD,,
+graphiti_core-0.9.4.dist-info/LICENSE,sha256=KCUwCyDXuVEgmDWkozHyniRyWjnWUWjkuDHfU6o3JlA,11325
+graphiti_core-0.9.4.dist-info/METADATA,sha256=wl7sIqZBszdCc_7go_qGyGTJCktv6xikxjlrWRedtn8,14344
+graphiti_core-0.9.4.dist-info/WHEEL,sha256=fGIA9gx4Qxk2KDKeNJCbOEwSrmLtjWCwzBz351GyrPQ,88
+graphiti_core-0.9.4.dist-info/RECORD,,