graphiti-core 0.9.3__tar.gz → 0.9.5__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of graphiti-core might be problematic.
- {graphiti_core-0.9.3 → graphiti_core-0.9.5}/PKG-INFO +13 -2
- {graphiti_core-0.9.3 → graphiti_core-0.9.5}/README.md +10 -0
- {graphiti_core-0.9.3 → graphiti_core-0.9.5}/graphiti_core/embedder/voyage.py +1 -1
- {graphiti_core-0.9.3 → graphiti_core-0.9.5}/graphiti_core/graphiti.py +27 -3
- {graphiti_core-0.9.3 → graphiti_core-0.9.5}/graphiti_core/prompts/dedupe_nodes.py +1 -1
- {graphiti_core-0.9.3 → graphiti_core-0.9.5}/graphiti_core/prompts/extract_edge_dates.py +1 -1
- {graphiti_core-0.9.3 → graphiti_core-0.9.5}/graphiti_core/prompts/extract_edges.py +5 -5
- {graphiti_core-0.9.3 → graphiti_core-0.9.5}/graphiti_core/prompts/extract_nodes.py +7 -7
- {graphiti_core-0.9.3 → graphiti_core-0.9.5}/graphiti_core/search/search_config.py +1 -1
- graphiti_core-0.9.5/graphiti_core/search/search_helpers.py +47 -0
- {graphiti_core-0.9.3 → graphiti_core-0.9.5}/graphiti_core/search/search_utils.py +2 -2
- {graphiti_core-0.9.3 → graphiti_core-0.9.5}/pyproject.toml +25 -27
- {graphiti_core-0.9.3 → graphiti_core-0.9.5}/LICENSE +0 -0
- {graphiti_core-0.9.3 → graphiti_core-0.9.5}/graphiti_core/__init__.py +0 -0
- {graphiti_core-0.9.3 → graphiti_core-0.9.5}/graphiti_core/cross_encoder/__init__.py +0 -0
- {graphiti_core-0.9.3 → graphiti_core-0.9.5}/graphiti_core/cross_encoder/bge_reranker_client.py +0 -0
- {graphiti_core-0.9.3 → graphiti_core-0.9.5}/graphiti_core/cross_encoder/client.py +0 -0
- {graphiti_core-0.9.3 → graphiti_core-0.9.5}/graphiti_core/cross_encoder/openai_reranker_client.py +0 -0
- {graphiti_core-0.9.3 → graphiti_core-0.9.5}/graphiti_core/edges.py +0 -0
- {graphiti_core-0.9.3 → graphiti_core-0.9.5}/graphiti_core/embedder/__init__.py +0 -0
- {graphiti_core-0.9.3 → graphiti_core-0.9.5}/graphiti_core/embedder/client.py +0 -0
- {graphiti_core-0.9.3 → graphiti_core-0.9.5}/graphiti_core/embedder/gemini.py +0 -0
- {graphiti_core-0.9.3 → graphiti_core-0.9.5}/graphiti_core/embedder/openai.py +0 -0
- {graphiti_core-0.9.3 → graphiti_core-0.9.5}/graphiti_core/errors.py +0 -0
- {graphiti_core-0.9.3 → graphiti_core-0.9.5}/graphiti_core/helpers.py +0 -0
- {graphiti_core-0.9.3 → graphiti_core-0.9.5}/graphiti_core/llm_client/__init__.py +0 -0
- {graphiti_core-0.9.3 → graphiti_core-0.9.5}/graphiti_core/llm_client/anthropic_client.py +0 -0
- {graphiti_core-0.9.3 → graphiti_core-0.9.5}/graphiti_core/llm_client/client.py +0 -0
- {graphiti_core-0.9.3 → graphiti_core-0.9.5}/graphiti_core/llm_client/config.py +0 -0
- {graphiti_core-0.9.3 → graphiti_core-0.9.5}/graphiti_core/llm_client/errors.py +0 -0
- {graphiti_core-0.9.3 → graphiti_core-0.9.5}/graphiti_core/llm_client/gemini_client.py +0 -0
- {graphiti_core-0.9.3 → graphiti_core-0.9.5}/graphiti_core/llm_client/groq_client.py +0 -0
- {graphiti_core-0.9.3 → graphiti_core-0.9.5}/graphiti_core/llm_client/openai_client.py +0 -0
- {graphiti_core-0.9.3 → graphiti_core-0.9.5}/graphiti_core/llm_client/openai_generic_client.py +0 -0
- {graphiti_core-0.9.3 → graphiti_core-0.9.5}/graphiti_core/llm_client/utils.py +0 -0
- {graphiti_core-0.9.3 → graphiti_core-0.9.5}/graphiti_core/models/__init__.py +0 -0
- {graphiti_core-0.9.3 → graphiti_core-0.9.5}/graphiti_core/models/edges/__init__.py +0 -0
- {graphiti_core-0.9.3 → graphiti_core-0.9.5}/graphiti_core/models/edges/edge_db_queries.py +0 -0
- {graphiti_core-0.9.3 → graphiti_core-0.9.5}/graphiti_core/models/nodes/__init__.py +0 -0
- {graphiti_core-0.9.3 → graphiti_core-0.9.5}/graphiti_core/models/nodes/node_db_queries.py +0 -0
- {graphiti_core-0.9.3 → graphiti_core-0.9.5}/graphiti_core/nodes.py +0 -0
- {graphiti_core-0.9.3 → graphiti_core-0.9.5}/graphiti_core/prompts/__init__.py +0 -0
- {graphiti_core-0.9.3 → graphiti_core-0.9.5}/graphiti_core/prompts/dedupe_edges.py +0 -0
- {graphiti_core-0.9.3 → graphiti_core-0.9.5}/graphiti_core/prompts/eval.py +0 -0
- {graphiti_core-0.9.3 → graphiti_core-0.9.5}/graphiti_core/prompts/invalidate_edges.py +0 -0
- {graphiti_core-0.9.3 → graphiti_core-0.9.5}/graphiti_core/prompts/lib.py +0 -0
- {graphiti_core-0.9.3 → graphiti_core-0.9.5}/graphiti_core/prompts/models.py +0 -0
- {graphiti_core-0.9.3 → graphiti_core-0.9.5}/graphiti_core/prompts/prompt_helpers.py +0 -0
- {graphiti_core-0.9.3 → graphiti_core-0.9.5}/graphiti_core/prompts/summarize_nodes.py +0 -0
- {graphiti_core-0.9.3 → graphiti_core-0.9.5}/graphiti_core/py.typed +0 -0
- {graphiti_core-0.9.3 → graphiti_core-0.9.5}/graphiti_core/search/__init__.py +0 -0
- {graphiti_core-0.9.3 → graphiti_core-0.9.5}/graphiti_core/search/search.py +0 -0
- {graphiti_core-0.9.3 → graphiti_core-0.9.5}/graphiti_core/search/search_config_recipes.py +0 -0
- {graphiti_core-0.9.3 → graphiti_core-0.9.5}/graphiti_core/search/search_filters.py +0 -0
- {graphiti_core-0.9.3 → graphiti_core-0.9.5}/graphiti_core/utils/__init__.py +0 -0
- {graphiti_core-0.9.3 → graphiti_core-0.9.5}/graphiti_core/utils/bulk_utils.py +0 -0
- {graphiti_core-0.9.3 → graphiti_core-0.9.5}/graphiti_core/utils/datetime_utils.py +0 -0
- {graphiti_core-0.9.3 → graphiti_core-0.9.5}/graphiti_core/utils/maintenance/__init__.py +0 -0
- {graphiti_core-0.9.3 → graphiti_core-0.9.5}/graphiti_core/utils/maintenance/community_operations.py +0 -0
- {graphiti_core-0.9.3 → graphiti_core-0.9.5}/graphiti_core/utils/maintenance/edge_operations.py +0 -0
- {graphiti_core-0.9.3 → graphiti_core-0.9.5}/graphiti_core/utils/maintenance/graph_data_operations.py +0 -0
- {graphiti_core-0.9.3 → graphiti_core-0.9.5}/graphiti_core/utils/maintenance/node_operations.py +0 -0
- {graphiti_core-0.9.3 → graphiti_core-0.9.5}/graphiti_core/utils/maintenance/temporal_operations.py +0 -0
- {graphiti_core-0.9.3 → graphiti_core-0.9.5}/graphiti_core/utils/maintenance/utils.py +0 -0
- {graphiti_core-0.9.3 → graphiti_core-0.9.5}/graphiti_core/utils/ontology_utils/entity_types_utils.py +0 -0

{graphiti_core-0.9.3 → graphiti_core-0.9.5}/PKG-INFO
@@ -1,11 +1,11 @@
 Metadata-Version: 2.3
 Name: graphiti-core
-Version: 0.9.3
+Version: 0.9.5
 Summary: A temporal graph building library
 License: Apache-2.0
 Author: Paul Paliychuk
 Author-email: paul@getzep.com
-Requires-Python: >=3.10
+Requires-Python: >=3.10,<4
 Classifier: License :: OSI Approved :: Apache Software License
 Classifier: Programming Language :: Python :: 3
 Classifier: Programming Language :: Python :: 3.10

@@ -18,6 +18,7 @@ Provides-Extra: groq
 Requires-Dist: anthropic (>=0.49.0) ; extra == "anthropic"
 Requires-Dist: diskcache (>=5.6.3)
 Requires-Dist: google-genai (>=1.8.0) ; extra == "google-genai"
+Requires-Dist: graph-service (>=1.0.0.7,<2.0.0.0)
 Requires-Dist: groq (>=0.2.0) ; extra == "groq"
 Requires-Dist: neo4j (>=5.23.0)
 Requires-Dist: numpy (>=1.0.0)

@@ -262,6 +263,16 @@ Make sure to replace the placeholder values with your actual Azure OpenAI creden
 
 Graphiti supports Google's Gemini models for both LLM inference and embeddings. To use Gemini, you'll need to configure both the LLM client and embedder with your Google API key.
 
+Install Graphiti:
+
+```bash
+poetry add "graphiti-core[google-genai]"
+
+# or
+
+uv add "graphiti-core[google-genai]"
+```
+
 ```python
 from graphiti_core import Graphiti
 from graphiti_core.llm_client.gemini_client import GeminiClient, LLMConfig

{graphiti_core-0.9.3 → graphiti_core-0.9.5}/README.md
@@ -231,6 +231,16 @@ Make sure to replace the placeholder values with your actual Azure OpenAI creden
 
 Graphiti supports Google's Gemini models for both LLM inference and embeddings. To use Gemini, you'll need to configure both the LLM client and embedder with your Google API key.
 
+Install Graphiti:
+
+```bash
+poetry add "graphiti-core[google-genai]"
+
+# or
+
+uv add "graphiti-core[google-genai]"
+```
+
 ```python
 from graphiti_core import Graphiti
 from graphiti_core.llm_client.gemini_client import GeminiClient, LLMConfig

{graphiti_core-0.9.3 → graphiti_core-0.9.5}/graphiti_core/embedder/voyage.py
@@ -55,4 +55,4 @@ class VoyageAIEmbedder(EmbedderClient):
             return []
 
         result = await self.client.embed(input_list, model=self.config.embedding_model)
-        return result.embeddings[0][: self.config.embedding_dim]
+        return [float(x) for x in result.embeddings[0][: self.config.embedding_dim]]

{graphiti_core-0.9.3 → graphiti_core-0.9.5}/graphiti_core/graphiti.py
@@ -33,6 +33,7 @@ from graphiti_core.nodes import CommunityNode, EntityNode, EpisodeType, Episodic
 from graphiti_core.search.search import SearchConfig, search
 from graphiti_core.search.search_config import DEFAULT_SEARCH_LIMIT, SearchResults
 from graphiti_core.search.search_config_recipes import (
+    COMBINED_HYBRID_SEARCH_CROSS_ENCODER,
     EDGE_HYBRID_SEARCH_NODE_DISTANCE,
     EDGE_HYBRID_SEARCH_RRF,
 )

@@ -647,7 +648,10 @@
         Perform a hybrid search on the knowledge graph.
 
         This method executes a search query on the graph, combining vector and
-        text-based search techniques to retrieve relevant facts.
+        text-based search techniques to retrieve relevant facts, returning the edges as a string.
+
+        This is our basic out-of-the-box search, for more robust results we recommend using our more advanced
+        search method graphiti.search_().
 
         Parameters
         ----------

@@ -668,8 +672,7 @@
         Notes
         -----
         This method uses a SearchConfig with num_episodes set to 0 and
-        num_results set to the provided num_results parameter.
-        the hybrid_search function to perform the actual search operation.
+        num_results set to the provided num_results parameter.
 
         The search is performed using the current date and time as the reference
         point for temporal relevance.

@@ -703,6 +706,27 @@
         bfs_origin_node_uuids: list[str] | None = None,
         search_filter: SearchFilters | None = None,
     ) -> SearchResults:
+        """DEPRECATED"""
+        return await self.search_(
+            query, config, group_ids, center_node_uuid, bfs_origin_node_uuids, search_filter
+        )
+
+    async def search_(
+        self,
+        query: str,
+        config: SearchConfig = COMBINED_HYBRID_SEARCH_CROSS_ENCODER,
+        group_ids: list[str] | None = None,
+        center_node_uuid: str | None = None,
+        bfs_origin_node_uuids: list[str] | None = None,
+        search_filter: SearchFilters | None = None,
+    ) -> SearchResults:
+        """search_ (replaces _search) is our advanced search method that returns Graph objects (nodes and edges) rather
+        than a list of facts. This endpoint allows the end user to utilize more advanced features such as filters and
+        different search and reranker methodologies across different layers in the graph.
+
+        For different config recipes refer to search/search_config_recipes.
+        """
+
         return await search(
             self.driver,
             self.embedder,

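For orientation, here is a minimal usage sketch of the new `search_` method added in this release. Only `Graphiti`, `search_`, `SearchResults`, and `COMBINED_HYBRID_SEARCH_CROSS_ENCODER` appear in the diff above; the Neo4j connection arguments, the example query, and the call to `close()` are assumptions, not part of this diff.

```python
# Minimal sketch (not from the diff): exercising the search_ API introduced in 0.9.5.
# Connection parameters and the query below are placeholders.
import asyncio

from graphiti_core import Graphiti
from graphiti_core.search.search_config_recipes import COMBINED_HYBRID_SEARCH_CROSS_ENCODER


async def main() -> None:
    # Assumed Neo4j connection arguments; adjust for your deployment.
    graphiti = Graphiti('bolt://localhost:7687', 'neo4j', 'password')
    try:
        # search_ returns a SearchResults object (edges, nodes, communities)
        # instead of the flat list of facts returned by graphiti.search().
        results = await graphiti.search_(
            'What changed between 0.9.3 and 0.9.5?',
            config=COMBINED_HYBRID_SEARCH_CROSS_ENCODER,
        )
        for edge in results.edges:
            print(edge.fact)
    finally:
        await graphiti.close()


if __name__ == '__main__':
    asyncio.run(main())
```
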
{graphiti_core-0.9.3 → graphiti_core-0.9.5}/graphiti_core/prompts/dedupe_nodes.py
@@ -57,7 +57,7 @@ def node(context: dict[str, Any]) -> list[Message]:
 {json.dumps([ep for ep in context['previous_episodes']], indent=2)}
 </PREVIOUS MESSAGES>
 <CURRENT MESSAGE>
-{context[
+{context['episode_content']}
 </CURRENT MESSAGE>
 
 <EXISTING NODES>

{graphiti_core-0.9.3 → graphiti_core-0.9.5}/graphiti_core/prompts/extract_edge_dates.py
@@ -53,7 +53,7 @@ def v1(context: dict[str, Any]) -> list[Message]:
 {context['previous_episodes']}
 </PREVIOUS MESSAGES>
 <CURRENT MESSAGE>
-{context[
+{context['current_episode']}
 </CURRENT MESSAGE>
 <REFERENCE TIMESTAMP>
 {context['reference_timestamp']}

{graphiti_core-0.9.3 → graphiti_core-0.9.5}/graphiti_core/prompts/extract_edges.py
@@ -60,11 +60,11 @@ def edge(context: dict[str, Any]) -> list[Message]:
 {json.dumps([ep for ep in context['previous_episodes']], indent=2)}
 </PREVIOUS MESSAGES>
 <CURRENT MESSAGE>
-{context[
+{context['episode_content']}
 </CURRENT MESSAGE>
 
 <ENTITIES>
-{context[
+{context['nodes']}
 </ENTITIES>
 
 {context['custom_prompt']}

@@ -90,15 +90,15 @@ def reflexion(context: dict[str, Any]) -> list[Message]:
 {json.dumps([ep for ep in context['previous_episodes']], indent=2)}
 </PREVIOUS MESSAGES>
 <CURRENT MESSAGE>
-{context[
+{context['episode_content']}
 </CURRENT MESSAGE>
 
 <EXTRACTED ENTITIES>
-{context[
+{context['nodes']}
 </EXTRACTED ENTITIES>
 
 <EXTRACTED FACTS>
-{context[
+{context['extracted_facts']}
 </EXTRACTED FACTS>
 
 Given the above MESSAGES, list of EXTRACTED ENTITIES entities, and list of EXTRACTED FACTS;

{graphiti_core-0.9.3 → graphiti_core-0.9.5}/graphiti_core/prompts/extract_nodes.py
@@ -68,7 +68,7 @@ def extract_message(context: dict[str, Any]) -> list[Message]:
 {json.dumps([ep for ep in context['previous_episodes']], indent=2)}
 </PREVIOUS MESSAGES>
 <CURRENT MESSAGE>
-{context[
+{context['episode_content']}
 </CURRENT MESSAGE>
 
 {context['custom_prompt']}

@@ -96,10 +96,10 @@ def extract_json(context: dict[str, Any]) -> list[Message]:
 
     user_prompt = f"""
 <SOURCE DESCRIPTION>:
-{context[
+{context['source_description']}
 </SOURCE DESCRIPTION>
 <JSON>
-{context[
+{context['episode_content']}
 </JSON>
 
 {context['custom_prompt']}

@@ -121,7 +121,7 @@ def extract_text(context: dict[str, Any]) -> list[Message]:
 
     user_prompt = f"""
 <TEXT>
-{context[
+{context['episode_content']}
 </TEXT>
 
 {context['custom_prompt']}

@@ -148,11 +148,11 @@ def reflexion(context: dict[str, Any]) -> list[Message]:
 {json.dumps([ep for ep in context['previous_episodes']], indent=2)}
 </PREVIOUS MESSAGES>
 <CURRENT MESSAGE>
-{context[
+{context['episode_content']}
 </CURRENT MESSAGE>
 
 <EXTRACTED ENTITIES>
-{context[
+{context['extracted_entities']}
 </EXTRACTED ENTITIES>
 
 Given the above previous messages, current message, and list of extracted entities; determine if any entities haven't been

@@ -172,7 +172,7 @@ def classify_nodes(context: dict[str, Any]) -> list[Message]:
 {json.dumps([ep for ep in context['previous_episodes']], indent=2)}
 </PREVIOUS MESSAGES>
 <CURRENT MESSAGE>
-{context[
+{context['episode_content']}
 </CURRENT MESSAGE>
 
 <EXTRACTED ENTITIES>

graphiti_core-0.9.5/graphiti_core/search/search_helpers.py (new file)
@@ -0,0 +1,47 @@
+"""
+Copyright 2024, Zep Software, Inc.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+from graphiti_core.edges import EntityEdge
+from graphiti_core.search.search_config import SearchResults
+
+
+def format_edge_date_range(edge: EntityEdge) -> str:
+    # return f"{datetime(edge.valid_at).strftime('%Y-%m-%d %H:%M:%S') if edge.valid_at else 'date unknown'} - {(edge.invalid_at.strftime('%Y-%m-%d %H:%M:%S') if edge.invalid_at else 'present')}"
+    return f'{edge.valid_at if edge.valid_at else "date unknown"} - {(edge.invalid_at if edge.invalid_at else "present")}'
+
+
+def search_results_to_context_string(search_results: SearchResults) -> str:
+    """Reformats a set of SearchResults into a single string to pass directly to an LLM as context"""
+    context_string = """FACTS and ENTITIES represent relevant context to the current conversation.
+COMMUNITIES represent a cluster of closely related entities.
+
+# These are the most relevant facts and their valid date ranges
+# format: FACT (Date range: from - to)
+"""
+    context_string += '<FACTS>\n'
+    for edge in search_results.edges:
+        context_string += f'- {edge.fact} ({format_edge_date_range(edge)})\n'
+    context_string += '</FACTS>\n'
+    context_string += '<ENTITIES>\n'
+    for node in search_results.nodes:
+        context_string += f'- {node.name}: {node.summary}\n'
+    context_string += '</ENTITIES>\n'
+    context_string += '<COMMUNITIES>\n'
+    for community in search_results.communities:
+        context_string += f'- {community.name}: {community.summary}\n'
+    context_string += '</COMMUNITIES>\n'
+
+    return context_string

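The new helper pairs naturally with `search_`. Below is a short sketch, assuming an already-initialized `Graphiti` instance; the `build_context` function name and the flow around it are illustrative, not part of the release, while the helper and the `SearchResults` fields come from the new file above.

```python
# Sketch only: render SearchResults from search_ into a prompt-ready string.
from graphiti_core import Graphiti
from graphiti_core.search.search_helpers import search_results_to_context_string


async def build_context(graphiti: Graphiti, query: str) -> str:
    # search_ yields SearchResults with .edges, .nodes, and .communities,
    # which the helper renders as <FACTS>, <ENTITIES>, and <COMMUNITIES> sections.
    results = await graphiti.search_(query)
    return search_results_to_context_string(results)
```
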
{graphiti_core-0.9.3 → graphiti_core-0.9.5}/graphiti_core/search/search_utils.py
@@ -229,8 +229,8 @@ async def edge_similarity_search(
 
     query: LiteralString = (
         """
-
-
+        MATCH (n:Entity)-[r:RELATES_TO]->(m:Entity)
+        """
         + group_filter_query
         + filter_query
         + """\nWITH DISTINCT r, vector.similarity.cosine(r.fact_embedding, $search_vector) AS score

{graphiti_core-0.9.3 → graphiti_core-0.9.5}/pyproject.toml
@@ -1,15 +1,15 @@
 [project]
 name = "graphiti-core"
 description = "A temporal graph building library"
-version = "0.9.3"
+version = "0.9.5"
 authors = [
-    {"name" = "Paul Paliychuk", "email" = "paul@getzep.com"},
-    {"name" = "Preston Rasmussen", "email" = "preston@getzep.com"},
-    {"name" = "Daniel Chalef", "email" = "daniel@getzep.com"},
+    { "name" = "Paul Paliychuk", "email" = "paul@getzep.com" },
+    { "name" = "Preston Rasmussen", "email" = "preston@getzep.com" },
+    { "name" = "Daniel Chalef", "email" = "daniel@getzep.com" },
 ]
 readme = "README.md"
 license = "Apache-2.0"
-requires-python = ">=3.10"
+requires-python = ">=3.10,<4"
 packages = [{ include = "graphiti_core", from = "." }]
 dependencies = [
     "pydantic>=2.8.2",

@@ -19,9 +19,9 @@ dependencies = [
     "tenacity>=9.0.0",
     "numpy>=1.0.0",
     "python-dotenv>=1.0.1",
+    "graph-service (>=1.0.0.7,<2.0.0.0)",
 ]
 
-
 [project.urls]
 Homepage = "https://help.getzep.com/graphiti/graphiti/overview"
 Repository = "https://github.com/getzep/graphiti"

@@ -31,27 +31,25 @@ anthropic = ["anthropic>=0.49.0"]
 groq = ["groq>=0.2.0"]
 google-genai = ["google-genai>=1.8.0"]
 
-[
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-    "ruff>=0.7.1",
-]
+[tool.poetry.group.dev.dependencies]
+mypy = ">=1.11.1"
+groq = ">=0.2.0"
+anthropic = ">=0.49.0"
+google-genai = ">=1.8.0"
+ipykernel = ">=6.29.5"
+jupyterlab = ">=4.2.4"
+diskcache-stubs = ">=5.6.3.6.20240818"
+langgraph = ">=0.2.15"
+langchain-anthropic = ">=0.2.4"
+langsmith = ">=0.1.108"
+langchain-openai = ">=0.2.6"
+sentence-transformers = ">=3.2.1"
+transformers = ">=4.45.2"
+voyageai = ">=0.2.3"
+pytest = ">=8.3.3"
+pytest-asyncio = ">=0.24.0"
+pytest-xdist = ">=3.6.1"
+ruff = ">=0.7.1"
 
 [build-system]
 requires = ["poetry-core"]