graphiti-core 0.12.0rc1__py3-none-any.whl → 0.24.3__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as published to a supported registry. It is provided for informational purposes only and reflects the changes between those versions as they appear in that registry.
Files changed (68)
  1. graphiti_core/cross_encoder/bge_reranker_client.py +12 -2
  2. graphiti_core/cross_encoder/gemini_reranker_client.py +161 -0
  3. graphiti_core/cross_encoder/openai_reranker_client.py +7 -5
  4. graphiti_core/decorators.py +110 -0
  5. graphiti_core/driver/__init__.py +19 -0
  6. graphiti_core/driver/driver.py +124 -0
  7. graphiti_core/driver/falkordb_driver.py +362 -0
  8. graphiti_core/driver/graph_operations/graph_operations.py +191 -0
  9. graphiti_core/driver/kuzu_driver.py +182 -0
  10. graphiti_core/driver/neo4j_driver.py +117 -0
  11. graphiti_core/driver/neptune_driver.py +305 -0
  12. graphiti_core/driver/search_interface/search_interface.py +89 -0
  13. graphiti_core/edges.py +287 -172
  14. graphiti_core/embedder/azure_openai.py +71 -0
  15. graphiti_core/embedder/client.py +2 -1
  16. graphiti_core/embedder/gemini.py +116 -22
  17. graphiti_core/embedder/voyage.py +13 -2
  18. graphiti_core/errors.py +8 -0
  19. graphiti_core/graph_queries.py +162 -0
  20. graphiti_core/graphiti.py +705 -193
  21. graphiti_core/graphiti_types.py +4 -2
  22. graphiti_core/helpers.py +87 -10
  23. graphiti_core/llm_client/__init__.py +16 -0
  24. graphiti_core/llm_client/anthropic_client.py +159 -56
  25. graphiti_core/llm_client/azure_openai_client.py +115 -0
  26. graphiti_core/llm_client/client.py +98 -21
  27. graphiti_core/llm_client/config.py +1 -1
  28. graphiti_core/llm_client/gemini_client.py +290 -41
  29. graphiti_core/llm_client/groq_client.py +14 -3
  30. graphiti_core/llm_client/openai_base_client.py +261 -0
  31. graphiti_core/llm_client/openai_client.py +56 -132
  32. graphiti_core/llm_client/openai_generic_client.py +91 -56
  33. graphiti_core/models/edges/edge_db_queries.py +259 -35
  34. graphiti_core/models/nodes/node_db_queries.py +311 -32
  35. graphiti_core/nodes.py +420 -205
  36. graphiti_core/prompts/dedupe_edges.py +46 -32
  37. graphiti_core/prompts/dedupe_nodes.py +67 -42
  38. graphiti_core/prompts/eval.py +4 -4
  39. graphiti_core/prompts/extract_edges.py +27 -16
  40. graphiti_core/prompts/extract_nodes.py +74 -31
  41. graphiti_core/prompts/prompt_helpers.py +39 -0
  42. graphiti_core/prompts/snippets.py +29 -0
  43. graphiti_core/prompts/summarize_nodes.py +23 -25
  44. graphiti_core/search/search.py +158 -82
  45. graphiti_core/search/search_config.py +39 -4
  46. graphiti_core/search/search_filters.py +126 -35
  47. graphiti_core/search/search_helpers.py +5 -6
  48. graphiti_core/search/search_utils.py +1405 -485
  49. graphiti_core/telemetry/__init__.py +9 -0
  50. graphiti_core/telemetry/telemetry.py +117 -0
  51. graphiti_core/tracer.py +193 -0
  52. graphiti_core/utils/bulk_utils.py +364 -285
  53. graphiti_core/utils/datetime_utils.py +13 -0
  54. graphiti_core/utils/maintenance/community_operations.py +67 -49
  55. graphiti_core/utils/maintenance/dedup_helpers.py +262 -0
  56. graphiti_core/utils/maintenance/edge_operations.py +339 -197
  57. graphiti_core/utils/maintenance/graph_data_operations.py +50 -114
  58. graphiti_core/utils/maintenance/node_operations.py +319 -238
  59. graphiti_core/utils/maintenance/temporal_operations.py +11 -3
  60. graphiti_core/utils/ontology_utils/entity_types_utils.py +1 -1
  61. graphiti_core/utils/text_utils.py +53 -0
  62. graphiti_core-0.24.3.dist-info/METADATA +726 -0
  63. graphiti_core-0.24.3.dist-info/RECORD +86 -0
  64. {graphiti_core-0.12.0rc1.dist-info → graphiti_core-0.24.3.dist-info}/WHEEL +1 -1
  65. graphiti_core-0.12.0rc1.dist-info/METADATA +0 -350
  66. graphiti_core-0.12.0rc1.dist-info/RECORD +0 -66
  67. /graphiti_core/{utils/maintenance/utils.py → migrations/__init__.py} +0 -0
  68. {graphiti_core-0.12.0rc1.dist-info → graphiti_core-0.24.3.dist-info/licenses}/LICENSE +0 -0
graphiti_core/driver/falkordb_driver.py
@@ -0,0 +1,362 @@
+ """
+ Copyright 2024, Zep Software, Inc.
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+ """
+
+ import asyncio
+ import datetime
+ import logging
+ from typing import TYPE_CHECKING, Any
+
+ if TYPE_CHECKING:
+     from falkordb import Graph as FalkorGraph
+     from falkordb.asyncio import FalkorDB
+ else:
+     try:
+         from falkordb import Graph as FalkorGraph
+         from falkordb.asyncio import FalkorDB
+     except ImportError:
+         # If falkordb is not installed, raise an ImportError
+         raise ImportError(
+             'falkordb is required for FalkorDriver. '
+             'Install it with: pip install graphiti-core[falkordb]'
+         ) from None
+
+ from graphiti_core.driver.driver import GraphDriver, GraphDriverSession, GraphProvider
+ from graphiti_core.graph_queries import get_fulltext_indices, get_range_indices
+ from graphiti_core.utils.datetime_utils import convert_datetimes_to_strings
+
+ logger = logging.getLogger(__name__)
+
+ STOPWORDS = [
+     'a',
+     'is',
+     'the',
+     'an',
+     'and',
+     'are',
+     'as',
+     'at',
+     'be',
+     'but',
+     'by',
+     'for',
+     'if',
+     'in',
+     'into',
+     'it',
+     'no',
+     'not',
+     'of',
+     'on',
+     'or',
+     'such',
+     'that',
+     'their',
+     'then',
+     'there',
+     'these',
+     'they',
+     'this',
+     'to',
+     'was',
+     'will',
+     'with',
+ ]
+
+
+ class FalkorDriverSession(GraphDriverSession):
+     provider = GraphProvider.FALKORDB
+
+     def __init__(self, graph: FalkorGraph):
+         self.graph = graph
+
+     async def __aenter__(self):
+         return self
+
+     async def __aexit__(self, exc_type, exc, tb):
+         # No cleanup needed for Falkor, but method must exist
+         pass
+
+     async def close(self):
+         # No explicit close needed for FalkorDB, but method must exist
+         pass
+
+     async def execute_write(self, func, *args, **kwargs):
+         # Directly await the provided async function with `self` as the transaction/session
+         return await func(self, *args, **kwargs)
+
+     async def run(self, query: str | list, **kwargs: Any) -> Any:
+         # FalkorDB does not support argument for Label Set, so it's converted into an array of queries
+         if isinstance(query, list):
+             for cypher, params in query:
+                 params = convert_datetimes_to_strings(params)
+                 await self.graph.query(str(cypher), params)  # type: ignore[reportUnknownArgumentType]
+         else:
+             params = dict(kwargs)
+             params = convert_datetimes_to_strings(params)
+             await self.graph.query(str(query), params)  # type: ignore[reportUnknownArgumentType]
+         # Assuming `graph.query` is async (ideal); otherwise, wrap in executor
+         return None
+
+
+ class FalkorDriver(GraphDriver):
+     provider = GraphProvider.FALKORDB
+     default_group_id: str = '\\_'
+     fulltext_syntax: str = '@'  # FalkorDB uses a redisearch-like syntax for fulltext queries
+     aoss_client: None = None
+
+     def __init__(
+         self,
+         host: str = 'localhost',
+         port: int = 6379,
+         username: str | None = None,
+         password: str | None = None,
+         falkor_db: FalkorDB | None = None,
+         database: str = 'default_db',
+     ):
+         """
+         Initialize the FalkorDB driver.
+
+         FalkorDB is a multi-tenant graph database.
+         To connect, provide the host and port.
+         The default parameters assume a local (on-premises) FalkorDB instance.
+
+         Args:
+             host (str): The host where FalkorDB is running.
+             port (int): The port on which FalkorDB is listening.
+             username (str | None): The username for authentication (if required).
+             password (str | None): The password for authentication (if required).
+             falkor_db (FalkorDB | None): An existing FalkorDB instance to use instead of creating a new one.
+             database (str): The name of the database to connect to. Defaults to 'default_db'.
+         """
+         super().__init__()
+         self._database = database
+         if falkor_db is not None:
+             # If a FalkorDB instance is provided, use it directly
+             self.client = falkor_db
+         else:
+             self.client = FalkorDB(host=host, port=port, username=username, password=password)
+
+         # Schedule the indices and constraints to be built
+         try:
+             # Try to get the current event loop
+             loop = asyncio.get_running_loop()
+             # Schedule the build_indices_and_constraints to run
+             loop.create_task(self.build_indices_and_constraints())
+         except RuntimeError:
+             # No event loop running, this will be handled later
+             pass
+
+     def _get_graph(self, graph_name: str | None) -> FalkorGraph:
+         # FalkorDB requires a non-None database name for multi-tenant graphs; the default is "default_db"
+         if graph_name is None:
+             graph_name = self._database
+         return self.client.select_graph(graph_name)
+
+     async def execute_query(self, cypher_query_, **kwargs: Any):
+         graph = self._get_graph(self._database)
+
+         # Convert datetime objects to ISO strings (FalkorDB does not support datetime objects directly)
+         params = convert_datetimes_to_strings(dict(kwargs))
+
+         try:
+             result = await graph.query(cypher_query_, params)  # type: ignore[reportUnknownArgumentType]
+         except Exception as e:
+             if 'already indexed' in str(e):
+                 # check if index already exists
+                 logger.info(f'Index already exists: {e}')
+                 return None
+             logger.error(f'Error executing FalkorDB query: {e}\n{cypher_query_}\n{params}')
+             raise
+
+         # Convert the result header to a list of strings
+         header = [h[1] for h in result.header]
+
+         # Convert FalkorDB's result format (list of lists) to the format expected by Graphiti (list of dicts)
+         records = []
+         for row in result.result_set:
+             record = {}
+             for i, field_name in enumerate(header):
+                 if i < len(row):
+                     record[field_name] = row[i]
+                 else:
+                     # If there are more fields in header than values in row, set to None
+                     record[field_name] = None
+             records.append(record)
+
+         return records, header, None
+
+     def session(self, database: str | None = None) -> GraphDriverSession:
+         return FalkorDriverSession(self._get_graph(database))
+
+     async def close(self) -> None:
+         """Close the driver connection."""
+         if hasattr(self.client, 'aclose'):
+             await self.client.aclose()  # type: ignore[reportUnknownMemberType]
+         elif hasattr(self.client.connection, 'aclose'):
+             await self.client.connection.aclose()
+         elif hasattr(self.client.connection, 'close'):
+             await self.client.connection.close()
+
+     async def delete_all_indexes(self) -> None:
+         result = await self.execute_query('CALL db.indexes()')
+         if not result:
+             return
+
+         records, _, _ = result
+         drop_tasks = []
+
+         for record in records:
+             label = record['label']
+             entity_type = record['entitytype']
+
+             for field_name, index_type in record['types'].items():
+                 if 'RANGE' in index_type:
+                     drop_tasks.append(self.execute_query(f'DROP INDEX ON :{label}({field_name})'))
+                 elif 'FULLTEXT' in index_type:
+                     if entity_type == 'NODE':
+                         drop_tasks.append(
+                             self.execute_query(
+                                 f'DROP FULLTEXT INDEX FOR (n:{label}) ON (n.{field_name})'
+                             )
+                         )
+                     elif entity_type == 'RELATIONSHIP':
+                         drop_tasks.append(
+                             self.execute_query(
+                                 f'DROP FULLTEXT INDEX FOR ()-[e:{label}]-() ON (e.{field_name})'
+                             )
+                         )
+
+         if drop_tasks:
+             await asyncio.gather(*drop_tasks)
+
+     async def build_indices_and_constraints(self, delete_existing=False):
+         if delete_existing:
+             await self.delete_all_indexes()
+         index_queries = get_range_indices(self.provider) + get_fulltext_indices(self.provider)
+         for query in index_queries:
+             await self.execute_query(query)
+
+     def clone(self, database: str) -> 'GraphDriver':
+         """
+         Returns a shallow copy of this driver with a different default database.
+         Reuses the same connection (e.g. FalkorDB, Neo4j).
+         """
+         if database == self._database:
+             cloned = self
+         elif database == self.default_group_id:
+             cloned = FalkorDriver(falkor_db=self.client)
+         else:
+             # Create a new instance of FalkorDriver with the same connection but a different database
+             cloned = FalkorDriver(falkor_db=self.client, database=database)
+
+         return cloned
+
+     async def health_check(self) -> None:
+         """Check FalkorDB connectivity by running a simple query."""
+         try:
+             await self.execute_query('MATCH (n) RETURN 1 LIMIT 1')
+             return None
+         except Exception as e:
+             print(f'FalkorDB health check failed: {e}')
+             raise
+
+     @staticmethod
+     def convert_datetimes_to_strings(obj):
+         if isinstance(obj, dict):
+             return {k: FalkorDriver.convert_datetimes_to_strings(v) for k, v in obj.items()}
+         elif isinstance(obj, list):
+             return [FalkorDriver.convert_datetimes_to_strings(item) for item in obj]
+         elif isinstance(obj, tuple):
+             return tuple(FalkorDriver.convert_datetimes_to_strings(item) for item in obj)
+         elif isinstance(obj, datetime):
+             return obj.isoformat()
+         else:
+             return obj
+
+     def sanitize(self, query: str) -> str:
+         """
+         Replace FalkorDB special characters with whitespace.
+         Based on FalkorDB tokenization rules: ,.<>{}[]"':;!@#$%^&*()-+=~
+         """
+         # FalkorDB separator characters that break text into tokens
+         separator_map = str.maketrans(
+             {
+                 ',': ' ',
+                 '.': ' ',
+                 '<': ' ',
+                 '>': ' ',
+                 '{': ' ',
+                 '}': ' ',
+                 '[': ' ',
+                 ']': ' ',
+                 '"': ' ',
+                 "'": ' ',
+                 ':': ' ',
+                 ';': ' ',
+                 '!': ' ',
+                 '@': ' ',
+                 '#': ' ',
+                 '$': ' ',
+                 '%': ' ',
+                 '^': ' ',
+                 '&': ' ',
+                 '*': ' ',
+                 '(': ' ',
+                 ')': ' ',
+                 '-': ' ',
+                 '+': ' ',
+                 '=': ' ',
+                 '~': ' ',
+                 '?': ' ',
+             }
+         )
+         sanitized = query.translate(separator_map)
+         # Clean up multiple spaces
+         sanitized = ' '.join(sanitized.split())
+         return sanitized
+
+     def build_fulltext_query(
+         self, query: str, group_ids: list[str] | None = None, max_query_length: int = 128
+     ) -> str:
+         """
+         Build a fulltext query string for FalkorDB using RedisSearch syntax.
+         FalkorDB uses RedisSearch-like syntax where:
+         - Field queries use @ prefix: @field:value
+         - Multiple values for same field: (@field:value1|value2)
+         - Text search doesn't need @ prefix for content fields
+         - AND is implicit with space: (@group_id:value) (text)
+         - OR uses pipe within parentheses: (@group_id:value1|value2)
+         """
+         if group_ids is None or len(group_ids) == 0:
+             group_filter = ''
+         else:
+             group_values = '|'.join(group_ids)
+             group_filter = f'(@group_id:{group_values})'
+
+         sanitized_query = self.sanitize(query)
+
+         # Remove stopwords from the sanitized query
+         query_words = sanitized_query.split()
+         filtered_words = [word for word in query_words if word.lower() not in STOPWORDS]
+         sanitized_query = ' | '.join(filtered_words)
+
+         # If the query is too long return no query
+         if len(sanitized_query.split(' ')) + len(group_ids or '') >= max_query_length:
+             return ''
+
+         full_query = group_filter + ' (' + sanitized_query + ')'
+
+         return full_query
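
To make the new driver's surface easier to follow, here is a minimal usage sketch based only on the constructor and methods added in this hunk. The connection details, group id, and query text are illustrative assumptions, not values taken from the package.

import asyncio

from graphiti_core.driver.falkordb_driver import FalkorDriver


async def main() -> None:
    # Assumes a FalkorDB instance is reachable locally; credentials are optional.
    driver = FalkorDriver(host='localhost', port=6379, database='default_db')

    # Runs 'MATCH (n) RETURN 1 LIMIT 1' and raises if the database is unreachable.
    await driver.health_check()

    # Sanitizes the text, strips stopwords, and scopes the search to group ids,
    # producing RedisSearch-style syntax such as '(@group_id:group_a) (Alice | Bob)'.
    fulltext = driver.build_fulltext_query('Alice and Bob', group_ids=['group_a'])
    print(fulltext)

    # Keyword arguments become query parameters; results come back as
    # (records, header, None) with records as a list of dicts keyed by column name.
    records, header, _ = await driver.execute_query(
        'MATCH (n {group_id: $group_id}) RETURN n.uuid AS uuid LIMIT 5',
        group_id='group_a',
    )
    print(header, records)

    await driver.close()


asyncio.run(main())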
graphiti_core/driver/graph_operations/graph_operations.py
@@ -0,0 +1,191 @@
+ """
+ Copyright 2024, Zep Software, Inc.
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+ """
+
+ from typing import Any
+
+ from pydantic import BaseModel
+
+
+ class GraphOperationsInterface(BaseModel):
+     """
+     Interface for updating graph mutation behavior.
+     """
+
+     # -----------------
+     # Node: Save/Delete
+     # -----------------
+
+     async def node_save(self, node: Any, driver: Any) -> None:
+         """Persist (create or update) a single node."""
+         raise NotImplementedError
+
+     async def node_delete(self, node: Any, driver: Any) -> None:
+         raise NotImplementedError
+
+     async def node_save_bulk(
+         self,
+         _cls: Any,  # kept for parity; callers won't pass it
+         driver: Any,
+         transaction: Any,
+         nodes: list[Any],
+         batch_size: int = 100,
+     ) -> None:
+         """Persist (create or update) many nodes in batches."""
+         raise NotImplementedError
+
+     async def node_delete_by_group_id(
+         self,
+         _cls: Any,
+         driver: Any,
+         group_id: str,
+         batch_size: int = 100,
+     ) -> None:
+         raise NotImplementedError
+
+     async def node_delete_by_uuids(
+         self,
+         _cls: Any,
+         driver: Any,
+         uuids: list[str],
+         group_id: str | None = None,
+         batch_size: int = 100,
+     ) -> None:
+         raise NotImplementedError
+
+     # --------------------------
+     # Node: Embeddings (load)
+     # --------------------------
+
+     async def node_load_embeddings(self, node: Any, driver: Any) -> None:
+         """
+         Load embedding vectors for a single node into the instance (e.g., set node.embedding or similar).
+         """
+         raise NotImplementedError
+
+     async def node_load_embeddings_bulk(
+         self,
+         driver: Any,
+         nodes: list[Any],
+         batch_size: int = 100,
+     ) -> dict[str, list[float]]:
+         """
+         Load embedding vectors for many nodes in batches.
+         """
+         raise NotImplementedError
+
+     # --------------------------
+     # EpisodicNode: Save/Delete
+     # --------------------------
+
+     async def episodic_node_save(self, node: Any, driver: Any) -> None:
+         """Persist (create or update) a single episodic node."""
+         raise NotImplementedError
+
+     async def episodic_node_delete(self, node: Any, driver: Any) -> None:
+         raise NotImplementedError
+
+     async def episodic_node_save_bulk(
+         self,
+         _cls: Any,
+         driver: Any,
+         transaction: Any,
+         nodes: list[Any],
+         batch_size: int = 100,
+     ) -> None:
+         """Persist (create or update) many episodic nodes in batches."""
+         raise NotImplementedError
+
+     async def episodic_edge_save_bulk(
+         self,
+         _cls: Any,
+         driver: Any,
+         transaction: Any,
+         episodic_edges: list[Any],
+         batch_size: int = 100,
+     ) -> None:
+         """Persist (create or update) many episodic edges in batches."""
+         raise NotImplementedError
+
+     async def episodic_node_delete_by_group_id(
+         self,
+         _cls: Any,
+         driver: Any,
+         group_id: str,
+         batch_size: int = 100,
+     ) -> None:
+         raise NotImplementedError
+
+     async def episodic_node_delete_by_uuids(
+         self,
+         _cls: Any,
+         driver: Any,
+         uuids: list[str],
+         group_id: str | None = None,
+         batch_size: int = 100,
+     ) -> None:
+         raise NotImplementedError
+
+     # -----------------
+     # Edge: Save/Delete
+     # -----------------
+
+     async def edge_save(self, edge: Any, driver: Any) -> None:
+         """Persist (create or update) a single edge."""
+         raise NotImplementedError
+
+     async def edge_delete(self, edge: Any, driver: Any) -> None:
+         raise NotImplementedError
+
+     async def edge_save_bulk(
+         self,
+         _cls: Any,
+         driver: Any,
+         transaction: Any,
+         edges: list[Any],
+         batch_size: int = 100,
+     ) -> None:
+         """Persist (create or update) many edges in batches."""
+         raise NotImplementedError
+
+     async def edge_delete_by_uuids(
+         self,
+         _cls: Any,
+         driver: Any,
+         uuids: list[str],
+         group_id: str | None = None,
+     ) -> None:
+         raise NotImplementedError
+
+     # -----------------
+     # Edge: Embeddings (load)
+     # -----------------
+
+     async def edge_load_embeddings(self, edge: Any, driver: Any) -> None:
+         """
+         Load embedding vectors for a single edge into the instance (e.g., set edge.embedding or similar).
+         """
+         raise NotImplementedError
+
+     async def edge_load_embeddings_bulk(
+         self,
+         driver: Any,
+         edges: list[Any],
+         batch_size: int = 100,
+     ) -> dict[str, list[float]]:
+         """
+         Load embedding vectors for many edges in batches
+         """
+         raise NotImplementedError
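
Every method on this interface raises NotImplementedError, so implementers override only the operations whose behavior they want to change. As a rough sketch (not taken from the package), a subclass could add logging around single-node saves; it assumes, hypothetically, that the node objects passed in expose an async save(driver) coroutine.

import logging
from typing import Any

from graphiti_core.driver.graph_operations.graph_operations import GraphOperationsInterface

logger = logging.getLogger(__name__)


class LoggingGraphOperations(GraphOperationsInterface):
    """Hypothetical override that logs single-node saves and delegates persistence."""

    async def node_save(self, node: Any, driver: Any) -> None:
        # Assumption: `node` provides an async save(driver) coroutine.
        logger.info('Saving node %s', getattr(node, 'uuid', '<unknown>'))
        await node.save(driver)

All other operations keep the base behavior and still raise NotImplementedError if invoked, which keeps partial overrides explicit.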