graphiti-core 0.17.4__py3-none-any.whl → 0.25.3__py3-none-any.whl
This diff compares publicly available package versions as released to their respective public registries and is provided for informational purposes only.
- graphiti_core/cross_encoder/gemini_reranker_client.py +1 -1
- graphiti_core/cross_encoder/openai_reranker_client.py +1 -1
- graphiti_core/decorators.py +110 -0
- graphiti_core/driver/driver.py +62 -2
- graphiti_core/driver/falkordb_driver.py +215 -23
- graphiti_core/driver/graph_operations/graph_operations.py +191 -0
- graphiti_core/driver/kuzu_driver.py +182 -0
- graphiti_core/driver/neo4j_driver.py +70 -8
- graphiti_core/driver/neptune_driver.py +305 -0
- graphiti_core/driver/search_interface/search_interface.py +89 -0
- graphiti_core/edges.py +264 -132
- graphiti_core/embedder/azure_openai.py +10 -3
- graphiti_core/embedder/client.py +2 -1
- graphiti_core/graph_queries.py +114 -101
- graphiti_core/graphiti.py +635 -260
- graphiti_core/graphiti_types.py +2 -0
- graphiti_core/helpers.py +37 -15
- graphiti_core/llm_client/anthropic_client.py +142 -52
- graphiti_core/llm_client/azure_openai_client.py +57 -19
- graphiti_core/llm_client/client.py +83 -21
- graphiti_core/llm_client/config.py +1 -1
- graphiti_core/llm_client/gemini_client.py +75 -57
- graphiti_core/llm_client/openai_base_client.py +92 -48
- graphiti_core/llm_client/openai_client.py +39 -9
- graphiti_core/llm_client/openai_generic_client.py +91 -56
- graphiti_core/models/edges/edge_db_queries.py +259 -35
- graphiti_core/models/nodes/node_db_queries.py +311 -32
- graphiti_core/nodes.py +388 -164
- graphiti_core/prompts/dedupe_edges.py +42 -31
- graphiti_core/prompts/dedupe_nodes.py +56 -39
- graphiti_core/prompts/eval.py +4 -4
- graphiti_core/prompts/extract_edges.py +24 -15
- graphiti_core/prompts/extract_nodes.py +76 -35
- graphiti_core/prompts/prompt_helpers.py +39 -0
- graphiti_core/prompts/snippets.py +29 -0
- graphiti_core/prompts/summarize_nodes.py +23 -25
- graphiti_core/search/search.py +154 -74
- graphiti_core/search/search_config.py +39 -4
- graphiti_core/search/search_filters.py +110 -31
- graphiti_core/search/search_helpers.py +5 -6
- graphiti_core/search/search_utils.py +1360 -473
- graphiti_core/tracer.py +193 -0
- graphiti_core/utils/bulk_utils.py +216 -90
- graphiti_core/utils/content_chunking.py +702 -0
- graphiti_core/utils/datetime_utils.py +13 -0
- graphiti_core/utils/maintenance/community_operations.py +62 -38
- graphiti_core/utils/maintenance/dedup_helpers.py +262 -0
- graphiti_core/utils/maintenance/edge_operations.py +306 -156
- graphiti_core/utils/maintenance/graph_data_operations.py +44 -74
- graphiti_core/utils/maintenance/node_operations.py +466 -206
- graphiti_core/utils/maintenance/temporal_operations.py +11 -3
- graphiti_core/utils/ontology_utils/entity_types_utils.py +1 -1
- graphiti_core/utils/text_utils.py +53 -0
- {graphiti_core-0.17.4.dist-info → graphiti_core-0.25.3.dist-info}/METADATA +221 -87
- graphiti_core-0.25.3.dist-info/RECORD +87 -0
- {graphiti_core-0.17.4.dist-info → graphiti_core-0.25.3.dist-info}/WHEEL +1 -1
- graphiti_core-0.17.4.dist-info/RECORD +0 -77
- /graphiti_core/{utils/maintenance/utils.py → migrations/__init__.py} +0 -0
- {graphiti_core-0.17.4.dist-info → graphiti_core-0.25.3.dist-info}/licenses/LICENSE +0 -0
graphiti_core/driver/graph_operations/graph_operations.py (new file)
@@ -0,0 +1,191 @@
+"""
+Copyright 2024, Zep Software, Inc.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+from typing import Any
+
+from pydantic import BaseModel
+
+
+class GraphOperationsInterface(BaseModel):
+    """
+    Interface for updating graph mutation behavior.
+    """
+
+    # -----------------
+    # Node: Save/Delete
+    # -----------------
+
+    async def node_save(self, node: Any, driver: Any) -> None:
+        """Persist (create or update) a single node."""
+        raise NotImplementedError
+
+    async def node_delete(self, node: Any, driver: Any) -> None:
+        raise NotImplementedError
+
+    async def node_save_bulk(
+        self,
+        _cls: Any,  # kept for parity; callers won't pass it
+        driver: Any,
+        transaction: Any,
+        nodes: list[Any],
+        batch_size: int = 100,
+    ) -> None:
+        """Persist (create or update) many nodes in batches."""
+        raise NotImplementedError
+
+    async def node_delete_by_group_id(
+        self,
+        _cls: Any,
+        driver: Any,
+        group_id: str,
+        batch_size: int = 100,
+    ) -> None:
+        raise NotImplementedError
+
+    async def node_delete_by_uuids(
+        self,
+        _cls: Any,
+        driver: Any,
+        uuids: list[str],
+        group_id: str | None = None,
+        batch_size: int = 100,
+    ) -> None:
+        raise NotImplementedError
+
+    # --------------------------
+    # Node: Embeddings (load)
+    # --------------------------
+
+    async def node_load_embeddings(self, node: Any, driver: Any) -> None:
+        """
+        Load embedding vectors for a single node into the instance (e.g., set node.embedding or similar).
+        """
+        raise NotImplementedError
+
+    async def node_load_embeddings_bulk(
+        self,
+        driver: Any,
+        nodes: list[Any],
+        batch_size: int = 100,
+    ) -> dict[str, list[float]]:
+        """
+        Load embedding vectors for many nodes in batches.
+        """
+        raise NotImplementedError
+
+    # --------------------------
+    # EpisodicNode: Save/Delete
+    # --------------------------
+
+    async def episodic_node_save(self, node: Any, driver: Any) -> None:
+        """Persist (create or update) a single episodic node."""
+        raise NotImplementedError
+
+    async def episodic_node_delete(self, node: Any, driver: Any) -> None:
+        raise NotImplementedError
+
+    async def episodic_node_save_bulk(
+        self,
+        _cls: Any,
+        driver: Any,
+        transaction: Any,
+        nodes: list[Any],
+        batch_size: int = 100,
+    ) -> None:
+        """Persist (create or update) many episodic nodes in batches."""
+        raise NotImplementedError
+
+    async def episodic_edge_save_bulk(
+        self,
+        _cls: Any,
+        driver: Any,
+        transaction: Any,
+        episodic_edges: list[Any],
+        batch_size: int = 100,
+    ) -> None:
+        """Persist (create or update) many episodic edges in batches."""
+        raise NotImplementedError
+
+    async def episodic_node_delete_by_group_id(
+        self,
+        _cls: Any,
+        driver: Any,
+        group_id: str,
+        batch_size: int = 100,
+    ) -> None:
+        raise NotImplementedError
+
+    async def episodic_node_delete_by_uuids(
+        self,
+        _cls: Any,
+        driver: Any,
+        uuids: list[str],
+        group_id: str | None = None,
+        batch_size: int = 100,
+    ) -> None:
+        raise NotImplementedError
+
+    # -----------------
+    # Edge: Save/Delete
+    # -----------------
+
+    async def edge_save(self, edge: Any, driver: Any) -> None:
+        """Persist (create or update) a single edge."""
+        raise NotImplementedError
+
+    async def edge_delete(self, edge: Any, driver: Any) -> None:
+        raise NotImplementedError
+
+    async def edge_save_bulk(
+        self,
+        _cls: Any,
+        driver: Any,
+        transaction: Any,
+        edges: list[Any],
+        batch_size: int = 100,
+    ) -> None:
+        """Persist (create or update) many edges in batches."""
+        raise NotImplementedError
+
+    async def edge_delete_by_uuids(
+        self,
+        _cls: Any,
+        driver: Any,
+        uuids: list[str],
+        group_id: str | None = None,
+    ) -> None:
+        raise NotImplementedError
+
+    # -----------------
+    # Edge: Embeddings (load)
+    # -----------------
+
+    async def edge_load_embeddings(self, edge: Any, driver: Any) -> None:
+        """
+        Load embedding vectors for a single edge into the instance (e.g., set edge.embedding or similar).
+        """
+        raise NotImplementedError
+
+    async def edge_load_embeddings_bulk(
+        self,
+        driver: Any,
+        edges: list[Any],
+        batch_size: int = 100,
+    ) -> dict[str, list[float]]:
+        """
+        Load embedding vectors for many edges in batches
+        """
+        raise NotImplementedError
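Every method on GraphOperationsInterface raises NotImplementedError; it is a set of override hooks rather than a working implementation, and an application supplies a subclass to customize how nodes and edges are persisted. A minimal sketch of such an override (the class name, the MERGE query, and the node attributes are illustrative assumptions, not code from the package):

from typing import Any


class CustomGraphOperations(GraphOperationsInterface):
    # Hypothetical override: persist an entity node with a hand-written MERGE.
    # Assumes `node` exposes `uuid` and `name`, and that `driver.execute_query`
    # accepts query parameters as keyword arguments, as the drivers in this diff do.
    async def node_save(self, node: Any, driver: Any) -> None:
        await driver.execute_query(
            'MERGE (n:Entity {uuid: $uuid}) SET n.name = $name',
            uuid=node.uuid,
            name=node.name,
        )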
graphiti_core/driver/kuzu_driver.py (new file)
@@ -0,0 +1,182 @@
+"""
+Copyright 2024, Zep Software, Inc.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+import logging
+from typing import Any
+
+import kuzu
+
+from graphiti_core.driver.driver import GraphDriver, GraphDriverSession, GraphProvider
+
+logger = logging.getLogger(__name__)
+
+# Kuzu requires an explicit schema.
+# As Kuzu currently does not support creating full text indexes on edge properties,
+# we work around this by representing (n:Entity)-[:RELATES_TO]->(m:Entity) as
+# (n)-[:RELATES_TO]->(e:RelatesToNode_)-[:RELATES_TO]->(m).
+SCHEMA_QUERIES = """
+    CREATE NODE TABLE IF NOT EXISTS Episodic (
+        uuid STRING PRIMARY KEY,
+        name STRING,
+        group_id STRING,
+        created_at TIMESTAMP,
+        source STRING,
+        source_description STRING,
+        content STRING,
+        valid_at TIMESTAMP,
+        entity_edges STRING[]
+    );
+    CREATE NODE TABLE IF NOT EXISTS Entity (
+        uuid STRING PRIMARY KEY,
+        name STRING,
+        group_id STRING,
+        labels STRING[],
+        created_at TIMESTAMP,
+        name_embedding FLOAT[],
+        summary STRING,
+        attributes STRING
+    );
+    CREATE NODE TABLE IF NOT EXISTS Community (
+        uuid STRING PRIMARY KEY,
+        name STRING,
+        group_id STRING,
+        created_at TIMESTAMP,
+        name_embedding FLOAT[],
+        summary STRING
+    );
+    CREATE NODE TABLE IF NOT EXISTS RelatesToNode_ (
+        uuid STRING PRIMARY KEY,
+        group_id STRING,
+        created_at TIMESTAMP,
+        name STRING,
+        fact STRING,
+        fact_embedding FLOAT[],
+        episodes STRING[],
+        expired_at TIMESTAMP,
+        valid_at TIMESTAMP,
+        invalid_at TIMESTAMP,
+        attributes STRING
+    );
+    CREATE REL TABLE IF NOT EXISTS RELATES_TO(
+        FROM Entity TO RelatesToNode_,
+        FROM RelatesToNode_ TO Entity
+    );
+    CREATE REL TABLE IF NOT EXISTS MENTIONS(
+        FROM Episodic TO Entity,
+        uuid STRING PRIMARY KEY,
+        group_id STRING,
+        created_at TIMESTAMP
+    );
+    CREATE REL TABLE IF NOT EXISTS HAS_MEMBER(
+        FROM Community TO Entity,
+        FROM Community TO Community,
+        uuid STRING,
+        group_id STRING,
+        created_at TIMESTAMP
+    );
+"""
+
+
+class KuzuDriver(GraphDriver):
+    provider: GraphProvider = GraphProvider.KUZU
+    aoss_client: None = None
+
+    def __init__(
+        self,
+        db: str = ':memory:',
+        max_concurrent_queries: int = 1,
+    ):
+        super().__init__()
+        self.db = kuzu.Database(db)
+
+        self.setup_schema()
+
+        self.client = kuzu.AsyncConnection(self.db, max_concurrent_queries=max_concurrent_queries)
+
+    async def execute_query(
+        self, cypher_query_: str, **kwargs: Any
+    ) -> tuple[list[dict[str, Any]] | list[list[dict[str, Any]]], None, None]:
+        params = {k: v for k, v in kwargs.items() if v is not None}
+        # Kuzu does not support these parameters.
+        params.pop('database_', None)
+        params.pop('routing_', None)
+
+        try:
+            results = await self.client.execute(cypher_query_, parameters=params)
+        except Exception as e:
+            params = {k: (v[:5] if isinstance(v, list) else v) for k, v in params.items()}
+            logger.error(f'Error executing Kuzu query: {e}\n{cypher_query_}\n{params}')
+            raise
+
+        if not results:
+            return [], None, None
+
+        if isinstance(results, list):
+            dict_results = [list(result.rows_as_dict()) for result in results]
+        else:
+            dict_results = list(results.rows_as_dict())
+        return dict_results, None, None  # type: ignore
+
+    def session(self, _database: str | None = None) -> GraphDriverSession:
+        return KuzuDriverSession(self)
+
+    async def close(self):
+        # Do not explicitly close the connection, instead rely on GC.
+        pass
+
+    def delete_all_indexes(self, database_: str):
+        pass
+
+    async def build_indices_and_constraints(self, delete_existing: bool = False):
+        # Kuzu doesn't support dynamic index creation like Neo4j or FalkorDB.
+        # Schema and indices are created during setup_schema().
+        # This method is required by the abstract base class but is a no-op for Kuzu.
+        pass
+
+    def setup_schema(self):
+        conn = kuzu.Connection(self.db)
+        conn.execute(SCHEMA_QUERIES)
+        conn.close()
+
+
+class KuzuDriverSession(GraphDriverSession):
+    provider = GraphProvider.KUZU
+
+    def __init__(self, driver: KuzuDriver):
+        self.driver = driver
+
+    async def __aenter__(self):
+        return self
+
+    async def __aexit__(self, exc_type, exc, tb):
+        # No cleanup needed for Kuzu, but method must exist.
+        pass
+
+    async def close(self):
+        # Do not close the session here, as we're reusing the driver connection.
+        pass
+
+    async def execute_write(self, func, *args, **kwargs):
+        # Directly await the provided async function with `self` as the transaction/session.
+        return await func(self, *args, **kwargs)
+
+    async def run(self, query: str | list, **kwargs: Any) -> Any:
+        if isinstance(query, list):
+            for cypher, params in query:
+                await self.driver.execute_query(cypher, **params)
+        else:
+            await self.driver.execute_query(query, **kwargs)
+        return None
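The schema comment above notes that Kuzu cannot build full-text indexes on relationship properties, so each Entity-to-Entity fact is stored as an intermediate RelatesToNode_ node reached by two RELATES_TO hops. A minimal usage sketch of the driver (the query and the 'demo' group_id are illustrative; a freshly created in-memory database simply returns no rows):

import asyncio

from graphiti_core.driver.kuzu_driver import KuzuDriver


async def main() -> None:
    # In-memory database; SCHEMA_QUERIES is applied by setup_schema() in __init__.
    driver = KuzuDriver(db=':memory:')

    # Traverse the two-hop RELATES_TO representation described above.
    rows, _, _ = await driver.execute_query(
        'MATCH (n:Entity)-[:RELATES_TO]->(e:RelatesToNode_)-[:RELATES_TO]->(m:Entity) '
        'WHERE n.group_id = $group_id '
        'RETURN n.name, e.fact, m.name LIMIT 10',
        group_id='demo',
    )
    print(rows)
    await driver.close()


asyncio.run(main())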
graphiti_core/driver/neo4j_driver.py
@@ -19,17 +19,27 @@ from collections.abc import Coroutine
 from typing import Any
 
 from neo4j import AsyncGraphDatabase, EagerResult
+from neo4j.exceptions import ClientError
 from typing_extensions import LiteralString
 
-from graphiti_core.driver.driver import GraphDriver, GraphDriverSession
+from graphiti_core.driver.driver import GraphDriver, GraphDriverSession, GraphProvider
+from graphiti_core.graph_queries import get_fulltext_indices, get_range_indices
+from graphiti_core.helpers import semaphore_gather
 
 logger = logging.getLogger(__name__)
 
 
 class Neo4jDriver(GraphDriver):
-    provider
-
-
+    provider = GraphProvider.NEO4J
+    default_group_id: str = ''
+
+    def __init__(
+        self,
+        uri: str,
+        user: str | None,
+        password: str | None,
+        database: str = 'neo4j',
+    ):
         super().__init__()
         self.client = AsyncGraphDatabase.driver(
             uri=uri,
@@ -37,6 +47,20 @@ class Neo4jDriver(GraphDriver):
         )
         self._database = database
 
+        # Schedule the indices and constraints to be built
+        import asyncio
+
+        try:
+            # Try to get the current event loop
+            loop = asyncio.get_running_loop()
+            # Schedule the build_indices_and_constraints to run
+            loop.create_task(self.build_indices_and_constraints())
+        except RuntimeError:
+            # No event loop running, this will be handled later
+            pass
+
+        self.aoss_client = None
+
     async def execute_query(self, cypher_query_: LiteralString, **kwargs: Any) -> EagerResult:
         # Check if database_ is provided in kwargs.
         # If not populated, set the value to retain backwards compatibility
@@ -45,7 +69,11 @@
             params = {}
         params.setdefault('database_', self._database)
 
-        result = await self.client.execute_query(cypher_query_, parameters_=params, **kwargs)
+        try:
+            result = await self.client.execute_query(cypher_query_, parameters_=params, **kwargs)
+        except Exception as e:
+            logger.error(f'Error executing Neo4j query: {e}\n{cypher_query_}\n{params}')
+            raise
 
         return result
 
@@ -56,9 +84,43 @@ class Neo4jDriver(GraphDriver):
     async def close(self) -> None:
         return await self.client.close()
 
-    def delete_all_indexes(self
-        database = database_ or self._database
+    def delete_all_indexes(self) -> Coroutine:
         return self.client.execute_query(
             'CALL db.indexes() YIELD name DROP INDEX name',
-            database_=database,
         )
+
+    async def _execute_index_query(self, query: LiteralString) -> EagerResult | None:
+        """Execute an index creation query, ignoring 'index already exists' errors.
+
+        Neo4j can raise EquivalentSchemaRuleAlreadyExists when concurrent CREATE INDEX
+        IF NOT EXISTS queries race, even though the index exists. This is safe to ignore.
+        """
+        try:
+            return await self.execute_query(query)
+        except ClientError as e:
+            # Ignore "equivalent index already exists" error (race condition with IF NOT EXISTS)
+            if 'EquivalentSchemaRuleAlreadyExists' in str(e):
+                logger.debug(f'Index already exists (concurrent creation): {query[:50]}...')
+                return None
+            raise
+
+    async def build_indices_and_constraints(self, delete_existing: bool = False):
+        if delete_existing:
+            await self.delete_all_indexes()
+
+        range_indices: list[LiteralString] = get_range_indices(self.provider)
+
+        fulltext_indices: list[LiteralString] = get_fulltext_indices(self.provider)
+
+        index_queries: list[LiteralString] = range_indices + fulltext_indices
+
+        await semaphore_gather(*[self._execute_index_query(query) for query in index_queries])
+
+    async def health_check(self) -> None:
+        """Check Neo4j connectivity by running the driver's verify_connectivity method."""
+        try:
+            await self.client.verify_connectivity()
+            return None
+        except Exception as e:
+            print(f'Neo4j health check failed: {e}')
+            raise
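With the constructor change above, index and constraint creation is scheduled as a background task only when the driver is built inside a running event loop; otherwise the RuntimeError branch leaves it to the caller. A minimal usage sketch (the URI and credentials are placeholders; calling build_indices_and_constraints explicitly is fine because _execute_index_query tolerates indexes that already exist):

import asyncio

from graphiti_core.driver.neo4j_driver import Neo4jDriver


async def main() -> None:
    # Placeholder connection details.
    driver = Neo4jDriver(uri='bolt://localhost:7687', user='neo4j', password='password')

    # Constructed inside a running loop, the constructor already scheduled this task;
    # an explicit call re-runs the same index queries, which _execute_index_query
    # treats as safe when they already exist.
    await driver.build_indices_and_constraints()

    await driver.health_check()  # raises if Neo4j is unreachable
    await driver.close()


asyncio.run(main())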