gllm-datastore-binary 0.5.50__cp312-cp312-macosx_13_0_arm64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- gllm_datastore/__init__.pyi +0 -0
- gllm_datastore/cache/__init__.pyi +4 -0
- gllm_datastore/cache/base.pyi +84 -0
- gllm_datastore/cache/cache.pyi +137 -0
- gllm_datastore/cache/hybrid_cache/__init__.pyi +5 -0
- gllm_datastore/cache/hybrid_cache/file_system_hybrid_cache.pyi +50 -0
- gllm_datastore/cache/hybrid_cache/hybrid_cache.pyi +115 -0
- gllm_datastore/cache/hybrid_cache/in_memory_hybrid_cache.pyi +29 -0
- gllm_datastore/cache/hybrid_cache/key_matcher/__init__.pyi +5 -0
- gllm_datastore/cache/hybrid_cache/key_matcher/exact_key_matcher.pyi +44 -0
- gllm_datastore/cache/hybrid_cache/key_matcher/fuzzy_key_matcher.pyi +70 -0
- gllm_datastore/cache/hybrid_cache/key_matcher/key_matcher.pyi +60 -0
- gllm_datastore/cache/hybrid_cache/key_matcher/semantic_key_matcher.pyi +93 -0
- gllm_datastore/cache/hybrid_cache/redis_hybrid_cache.pyi +34 -0
- gllm_datastore/cache/hybrid_cache/utils.pyi +36 -0
- gllm_datastore/cache/utils.pyi +34 -0
- gllm_datastore/cache/vector_cache/__init__.pyi +0 -0
- gllm_datastore/cache/vector_cache/eviction_manager/__init__.pyi +0 -0
- gllm_datastore/cache/vector_cache/eviction_manager/asyncio_eviction_manager.pyi +48 -0
- gllm_datastore/cache/vector_cache/eviction_manager/eviction_manager.pyi +38 -0
- gllm_datastore/cache/vector_cache/eviction_strategy/__init__.pyi +0 -0
- gllm_datastore/cache/vector_cache/eviction_strategy/eviction_strategy.pyi +34 -0
- gllm_datastore/cache/vector_cache/eviction_strategy/ttl_eviction_strategy.pyi +34 -0
- gllm_datastore/cache/vector_cache/vector_cache.pyi +99 -0
- gllm_datastore/constants.pyi +66 -0
- gllm_datastore/core/__init__.pyi +7 -0
- gllm_datastore/core/capabilities/__init__.pyi +7 -0
- gllm_datastore/core/capabilities/encryption_capability.pyi +21 -0
- gllm_datastore/core/capabilities/fulltext_capability.pyi +73 -0
- gllm_datastore/core/capabilities/graph_capability.pyi +70 -0
- gllm_datastore/core/capabilities/hybrid_capability.pyi +184 -0
- gllm_datastore/core/capabilities/vector_capability.pyi +90 -0
- gllm_datastore/core/filters/__init__.pyi +4 -0
- gllm_datastore/core/filters/filter.pyi +340 -0
- gllm_datastore/core/filters/schema.pyi +149 -0
- gllm_datastore/data_store/__init__.pyi +8 -0
- gllm_datastore/data_store/_elastic_core/__init__.pyi +0 -0
- gllm_datastore/data_store/_elastic_core/client_factory.pyi +66 -0
- gllm_datastore/data_store/_elastic_core/constants.pyi +27 -0
- gllm_datastore/data_store/_elastic_core/elastic_like_core.pyi +115 -0
- gllm_datastore/data_store/_elastic_core/index_manager.pyi +37 -0
- gllm_datastore/data_store/_elastic_core/query_translator.pyi +89 -0
- gllm_datastore/data_store/base.pyi +176 -0
- gllm_datastore/data_store/chroma/__init__.pyi +4 -0
- gllm_datastore/data_store/chroma/_chroma_import.pyi +13 -0
- gllm_datastore/data_store/chroma/data_store.pyi +201 -0
- gllm_datastore/data_store/chroma/fulltext.pyi +134 -0
- gllm_datastore/data_store/chroma/query.pyi +266 -0
- gllm_datastore/data_store/chroma/query_translator.pyi +41 -0
- gllm_datastore/data_store/chroma/vector.pyi +197 -0
- gllm_datastore/data_store/elasticsearch/__init__.pyi +5 -0
- gllm_datastore/data_store/elasticsearch/data_store.pyi +147 -0
- gllm_datastore/data_store/elasticsearch/fulltext.pyi +238 -0
- gllm_datastore/data_store/elasticsearch/query.pyi +118 -0
- gllm_datastore/data_store/elasticsearch/query_translator.pyi +18 -0
- gllm_datastore/data_store/elasticsearch/vector.pyi +180 -0
- gllm_datastore/data_store/exceptions.pyi +35 -0
- gllm_datastore/data_store/in_memory/__init__.pyi +5 -0
- gllm_datastore/data_store/in_memory/data_store.pyi +71 -0
- gllm_datastore/data_store/in_memory/fulltext.pyi +131 -0
- gllm_datastore/data_store/in_memory/query.pyi +175 -0
- gllm_datastore/data_store/in_memory/vector.pyi +174 -0
- gllm_datastore/data_store/opensearch/__init__.pyi +5 -0
- gllm_datastore/data_store/opensearch/data_store.pyi +160 -0
- gllm_datastore/data_store/opensearch/fulltext.pyi +240 -0
- gllm_datastore/data_store/opensearch/query.pyi +89 -0
- gllm_datastore/data_store/opensearch/query_translator.pyi +18 -0
- gllm_datastore/data_store/opensearch/vector.pyi +211 -0
- gllm_datastore/data_store/redis/__init__.pyi +5 -0
- gllm_datastore/data_store/redis/data_store.pyi +153 -0
- gllm_datastore/data_store/redis/fulltext.pyi +128 -0
- gllm_datastore/data_store/redis/query.pyi +428 -0
- gllm_datastore/data_store/redis/query_translator.pyi +37 -0
- gllm_datastore/data_store/redis/vector.pyi +131 -0
- gllm_datastore/data_store/sql/__init__.pyi +4 -0
- gllm_datastore/data_store/sql/constants.pyi +5 -0
- gllm_datastore/data_store/sql/data_store.pyi +201 -0
- gllm_datastore/data_store/sql/fulltext.pyi +164 -0
- gllm_datastore/data_store/sql/query.pyi +81 -0
- gllm_datastore/data_store/sql/query_translator.pyi +51 -0
- gllm_datastore/data_store/sql/schema.pyi +16 -0
- gllm_datastore/encryptor/__init__.pyi +4 -0
- gllm_datastore/encryptor/aes_gcm_encryptor.pyi +45 -0
- gllm_datastore/encryptor/capability/__init__.pyi +3 -0
- gllm_datastore/encryptor/capability/mixin.pyi +32 -0
- gllm_datastore/encryptor/encryptor.pyi +52 -0
- gllm_datastore/encryptor/key_ring/__init__.pyi +3 -0
- gllm_datastore/encryptor/key_ring/in_memory_key_ring.pyi +52 -0
- gllm_datastore/encryptor/key_ring/key_ring.pyi +45 -0
- gllm_datastore/encryptor/key_rotating_encryptor.pyi +60 -0
- gllm_datastore/graph_data_store/__init__.pyi +6 -0
- gllm_datastore/graph_data_store/graph_data_store.pyi +151 -0
- gllm_datastore/graph_data_store/graph_rag_data_store.pyi +29 -0
- gllm_datastore/graph_data_store/light_rag_data_store.pyi +93 -0
- gllm_datastore/graph_data_store/light_rag_postgres_data_store.pyi +96 -0
- gllm_datastore/graph_data_store/llama_index_graph_rag_data_store.pyi +49 -0
- gllm_datastore/graph_data_store/llama_index_neo4j_graph_rag_data_store.pyi +78 -0
- gllm_datastore/graph_data_store/mixins/__init__.pyi +3 -0
- gllm_datastore/graph_data_store/mixins/agentic_graph_tools_mixin.pyi +175 -0
- gllm_datastore/graph_data_store/nebula_graph_data_store.pyi +206 -0
- gllm_datastore/graph_data_store/neo4j_graph_data_store.pyi +182 -0
- gllm_datastore/graph_data_store/schema.pyi +27 -0
- gllm_datastore/graph_data_store/utils/__init__.pyi +6 -0
- gllm_datastore/graph_data_store/utils/constants.pyi +21 -0
- gllm_datastore/graph_data_store/utils/light_rag_em_invoker_adapter.pyi +56 -0
- gllm_datastore/graph_data_store/utils/light_rag_lm_invoker_adapter.pyi +43 -0
- gllm_datastore/graph_data_store/utils/llama_index_em_invoker_adapter.pyi +45 -0
- gllm_datastore/graph_data_store/utils/llama_index_lm_invoker_adapter.pyi +169 -0
- gllm_datastore/signature/__init__.pyi +0 -0
- gllm_datastore/signature/webhook_signature.pyi +31 -0
- gllm_datastore/sql_data_store/__init__.pyi +4 -0
- gllm_datastore/sql_data_store/adapter/__init__.pyi +0 -0
- gllm_datastore/sql_data_store/adapter/sqlalchemy_adapter.pyi +38 -0
- gllm_datastore/sql_data_store/constants.pyi +6 -0
- gllm_datastore/sql_data_store/sql_data_store.pyi +86 -0
- gllm_datastore/sql_data_store/sqlalchemy_sql_data_store.pyi +216 -0
- gllm_datastore/sql_data_store/types.pyi +31 -0
- gllm_datastore/utils/__init__.pyi +6 -0
- gllm_datastore/utils/converter.pyi +51 -0
- gllm_datastore/utils/dict.pyi +21 -0
- gllm_datastore/utils/ttl.pyi +25 -0
- gllm_datastore/utils/types.pyi +32 -0
- gllm_datastore/vector_data_store/__init__.pyi +6 -0
- gllm_datastore/vector_data_store/chroma_vector_data_store.pyi +259 -0
- gllm_datastore/vector_data_store/elasticsearch_vector_data_store.pyi +357 -0
- gllm_datastore/vector_data_store/in_memory_vector_data_store.pyi +179 -0
- gllm_datastore/vector_data_store/mixin/__init__.pyi +0 -0
- gllm_datastore/vector_data_store/mixin/cache_compatible_mixin.pyi +145 -0
- gllm_datastore/vector_data_store/redis_vector_data_store.pyi +191 -0
- gllm_datastore/vector_data_store/vector_data_store.pyi +146 -0
- gllm_datastore.build/.gitignore +1 -0
- gllm_datastore.cpython-312-darwin.so +0 -0
- gllm_datastore.pyi +178 -0
- gllm_datastore_binary-0.5.50.dist-info/METADATA +185 -0
- gllm_datastore_binary-0.5.50.dist-info/RECORD +137 -0
- gllm_datastore_binary-0.5.50.dist-info/WHEEL +5 -0
- gllm_datastore_binary-0.5.50.dist-info/top_level.txt +1 -0
gllm_datastore/graph_data_store/mixins/agentic_graph_tools_mixin.pyi
@@ -0,0 +1,175 @@
+from abc import ABC, abstractmethod
+from gllm_core.schema import Node as Node
+from gllm_datastore.graph_data_store.schema import RelationshipDirection as RelationshipDirection, SearchPosition as SearchPosition, Triplet as Triplet
+from typing import Any
+
+class AgenticGraphToolsMixin(ABC):
+    """Mixin class providing agentic graph tools for graph exploration.
+
+    This mixin adds methods for graph database operations including:
+    - Safe query execution (read-only)
+    - Neighborhood exploration
+    - Node and relationship search
+    - Context-sensitive autocomplete
+
+    Note: The base query() method remains unchanged and allows write operations;
+    read_only_query() provides a read-only guarantee for safety-critical scenarios.
+    """
+    @abstractmethod
+    async def read_only_query(self, query: str, parameters: dict[str, Any] | None = None, max_results: int = 100, timeout: int = 60) -> list[dict[str, Any]]:
+        """Execute a validated read-only query.
+
+        Differentiates from the base query() by enforcing read-only validation:
+        base query() allows write operations (CREATE, DELETE, MERGE, SET, REMOVE),
+        while this method blocks all write operations for safety.
+
+        Args:
+            query (str): Query string to execute.
+            parameters (dict[str, Any] | None, optional): Query parameters. Defaults to None.
+            max_results (int, optional): Maximum results to return. Defaults to 100.
+            timeout (int, optional): Query timeout in seconds. Defaults to 60.
+
+        Returns:
+            list[dict[str, Any]]: List of result dictionaries.
+
+        Raises:
+            NotImplementedError: This is an abstract method that must be implemented by subclasses.
+        """
+    @abstractmethod
+    async def get_neighborhood(self, node_id: str | None = None, relationship_type: str | None = None, target_node_id: str | None = None, limit: int = 10) -> list[Triplet]:
+        """Get graph patterns matching partial constraints.
+
+        Provide at least one of: node_id, relationship_type, or target_node_id.
+        Returns up to `limit` diverse patterns to help understand the graph structure.
+
+        This method differs from traverse() in the base graph data store:
+        - traverse() follows a specific path through the graph from a starting node.
+        - get_neighborhood() discovers patterns matching given constraints without
+          requiring a specific traversal path or starting point.
+
+        Args:
+            node_id (str | None, optional): Source node ID (property-based). Defaults to None.
+            relationship_type (str | None, optional): Relationship type to filter by. Defaults to None.
+            target_node_id (str | None, optional): Target node ID (property-based). Defaults to None.
+            limit (int, optional): Maximum number of patterns to return. Defaults to 10.
+
+        Returns:
+            list[Triplet]: List of triplets containing source-relationship-target patterns.
+
+        Raises:
+            NotImplementedError: This is an abstract method that must be implemented by subclasses.
+        """
+    @abstractmethod
+    async def search_node(self, query: str, node_label: str | None = None, limit: int = 10) -> list[Node]:
+        """Search for nodes using substring matching.
+
+        Searches across common properties such as id, name, title, and description
+        using case-insensitive substring matching.
+
+        Args:
+            query (str): Search query string.
+            node_label (str | None, optional): Optional node label to filter by. Defaults to None.
+            limit (int, optional): Maximum number of results to return. Defaults to 10.
+
+        Returns:
+            list[Node]: List of matching nodes.
+
+        Raises:
+            NotImplementedError: This is an abstract method that must be implemented by subclasses.
+        """
+    @abstractmethod
+    async def search_relationship(self, query: str, node_label: str | None = None, limit: int = 10) -> list[Triplet]:
+        """Search for relationship types using substring matching.
+
+        Returns relationship types that exist in the graph, with usage counts,
+        which helps avoid hallucinating relationship types.
+
+        Args:
+            query (str): Search query string.
+            node_label (str | None, optional): Optional node label to filter relationships. Defaults to None.
+            limit (int, optional): Maximum number of results to return. Defaults to 10.
+
+        Returns:
+            list[Triplet]: List of triplets representing relationship types found in the graph.
+
+        Raises:
+            NotImplementedError: This is an abstract method that must be implemented by subclasses.
+        """
+    @abstractmethod
+    async def search_rel_of_node(self, query: str, node_id: str, direction: RelationshipDirection = ..., limit: int = 10) -> list[Triplet]:
+        """Search for relationship types for a specific node.
+
+        More context-sensitive than the general relationship search:
+        only returns relationships that connect to the given node.
+
+        Args:
+            query (str): Search query string.
+            node_id (str): Node ID to search relationships for.
+            direction (RelationshipDirection, optional): Relationship direction. Defaults to BOTH.
+            limit (int, optional): Maximum number of results to return. Defaults to 10.
+
+        Returns:
+            list[Triplet]: List of triplets for the node.
+
+        Raises:
+            NotImplementedError: This is an abstract method that must be implemented by subclasses.
+        """
+    @abstractmethod
+    async def search_target_of_rel(self, query: str, relationship_type: str, source_node_id: str | None = None, limit: int = 10) -> list[Node]:
+        """Search for target nodes reachable via a relationship type.
+
+        If source_node_id is provided, finds targets reachable from that node only;
+        otherwise, finds all nodes reachable via the relationship type.
+
+        Args:
+            query (str): Search query string.
+            relationship_type (str): Relationship type to traverse.
+            source_node_id (str | None, optional): Optional source node to start from. Defaults to None.
+            limit (int, optional): Maximum number of results to return. Defaults to 10.
+
+        Returns:
+            list[Node]: List of target nodes matching the query.
+
+        Raises:
+            NotImplementedError: This is an abstract method that must be implemented by subclasses.
+        """
+    @abstractmethod
+    async def search_autocomplete(self, query: str, query_pattern: str, search_var: str, limit: int = 10) -> list[Node]:
+        """Context-sensitive search constrained by a query pattern.
+
+        Executes a partial query pattern and searches for nodes
+        that can be bound to search_var; the pattern provides context.
+
+        Args:
+            query (str): Search query string.
+            query_pattern (str): Partial query with a variable placeholder.
+            search_var (str): Variable name to search for (e.g., 'company').
+            limit (int, optional): Maximum number of results to return. Defaults to 10.
+
+        Returns:
+            list[Node]: List of nodes matching the query within the pattern context.
+
+        Raises:
+            NotImplementedError: This is an abstract method that must be implemented by subclasses.
+        """
+    @abstractmethod
+    async def search_constrained(self, query: str, position: SearchPosition, source_node_id: str | None = None, relationship_type: str | None = None, target_node_id: str | None = None, limit: int = 10) -> list[Node] | list[Triplet]:
+        """Search for items in a pattern position under constraints.
+
+        Builds a pattern with constraints and searches for items in the specified position (source, relationship, or target).
+
+        Args:
+            query (str): Search query string.
+            position (SearchPosition): What to search for (SOURCE, RELATIONSHIP, or TARGET).
+            source_node_id (str | None, optional): Constraint on the source node. Defaults to None.
+            relationship_type (str | None, optional): Constraint on the relationship type. Defaults to None.
+            target_node_id (str | None, optional): Constraint on the target node. Defaults to None.
+            limit (int, optional): Maximum number of results to return. Defaults to 10.
+
+        Returns:
+            list[Node] | list[Triplet]: List of nodes (if position is SOURCE/TARGET)
+                or triplets (if position is RELATIONSHIP).
+
+        Raises:
+            NotImplementedError: This is an abstract method that must be implemented by subclasses.
+        """
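The stub above only declares the agentic tool surface. As a hedged illustration (not part of the package), the sketch below shows how a concrete data store that implements AgenticGraphToolsMixin might be driven; the placeholder node ID and the Cypher-style query text are assumptions, and the stubs do not say which concrete stores mix this class in.

```python
# Illustrative sketch only: assumes `store` is some concrete BaseGraphDataStore
# subclass that also implements AgenticGraphToolsMixin (not specified in the stubs).
from gllm_datastore.graph_data_store.mixins.agentic_graph_tools_mixin import AgenticGraphToolsMixin
from gllm_datastore.graph_data_store.schema import RelationshipDirection


async def explore(store: AgenticGraphToolsMixin) -> dict:
    # Case-insensitive substring search over common node properties.
    people = await store.search_node("john", node_label="Person", limit=5)

    # Discover relationship types that actually touch a node instead of guessing
    # type names; "person-123" is a placeholder node ID.
    rels = await store.search_rel_of_node(
        "works", node_id="person-123", direction=RelationshipDirection.OUTGOING
    )

    # Read-only path: write clauses (CREATE, DELETE, MERGE, ...) are rejected.
    # The query text is Cypher-style and depends on the backing store.
    rows = await store.read_only_query(
        "MATCH (p:Person) RETURN p.name AS name", max_results=10, timeout=30
    )
    return {"nodes": people, "relationships": rels, "rows": rows}
```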
gllm_datastore/graph_data_store/nebula_graph_data_store.pyi
@@ -0,0 +1,206 @@
+from _typeshed import Incomplete
+from gllm_datastore.graph_data_store.graph_data_store import BaseGraphDataStore as BaseGraphDataStore
+from typing import Any
+
+class NebulaGraphDataStore(BaseGraphDataStore):
+    '''Implementation of BaseGraphDataStore for Nebula Graph.
+
+    This class provides an interface for graph-based Retrieval-Augmented Generation (RAG)
+    operations on Nebula graph databases.
+
+    Attributes:
+        connection_pool (ConnectionPool): The connection pool for Nebula Graph.
+        space (str): The space name.
+        user (str): The username.
+        password (str): The password.
+        operation_wait_time (int): The timeout in seconds.
+
+    Example:
+        ```python
+        store = NebulaGraphDataStore(
+            url="127.0.0.1",
+            port=9669,
+            user="root",
+            password="nebula",
+            space="testing"
+        )
+        # Perform query
+        results = await store.query("MATCH (n) RETURN n")
+
+        # Create a node
+        node = await store.upsert_node("Person", "name", "John", {"age": 30})
+        ```
+    '''
+    connection_pool: Incomplete
+    space: Incomplete
+    user: Incomplete
+    password: Incomplete
+    operation_wait_time: Incomplete
+    def __init__(self, url: str, port: int, user: str, password: str, space: str, operation_wait_time: int = 5) -> None:
+        """Initialize NebulaGraphDataStore.
+
+        Args:
+            url (str): The URL of the graph store.
+            port (int): The port of the graph store.
+            user (str): The user of the graph store.
+            password (str): The password of the graph store.
+            space (str): The space name.
+            operation_wait_time (int, optional): The operation wait time in seconds. Defaults to 5.
+        """
+    async def upsert_node(self, label: str, identifier_key: str, identifier_value: str, properties: dict[str, Any] | None = None) -> Any:
+        """Upsert a node in the graph.
+
+        Args:
+            label (str): The label of the node.
+            identifier_key (str): The key of the identifier.
+            identifier_value (str): The value of the identifier.
+            properties (dict[str, Any] | None, optional): The properties of the node. Defaults to None.
+
+        Returns:
+            Any: The result of the operation.
+        """
+    async def upsert_relationship(self, node_source_key: str, node_source_value: str, relation: str, node_target_key: str, node_target_value: str, properties: dict[str, Any] | None = None) -> Any:
+        """Upsert a relationship between two nodes in the graph.
+
+        Args:
+            node_source_key (str): The key of the source node.
+            node_source_value (str): The value of the source node.
+            relation (str): The type of the relationship.
+            node_target_key (str): The key of the target node.
+            node_target_value (str): The value of the target node.
+            properties (dict[str, Any] | None, optional): The properties of the relationship. Defaults to None.
+
+        Returns:
+            Any: The result of the operation.
+        """
+    async def delete_node(self, label: str, identifier_key: str, identifier_value: str) -> Any:
+        """Delete a node from the graph.
+
+        Args:
+            label (str): The label of the node.
+            identifier_key (str): The key of the identifier.
+            identifier_value (str): The identifier of the node.
+
+        Returns:
+            Any: The result of the operation.
+        """
+    async def delete_relationship(self, node_source_key: str, node_source_value: str, relation: str, node_target_key: str, node_target_value: str) -> Any:
+        """Delete a relationship between two nodes in the graph.
+
+        Args:
+            node_source_key (str): The key of the source node.
+            node_source_value (str): The identifier of the source node.
+            relation (str): The type of the relationship.
+            node_target_key (str): The key of the target node.
+            node_target_value (str): The identifier of the target node.
+
+        Returns:
+            Any: The result of the operation.
+        """
+    async def query(self, query: str, parameters: dict[str, Any] | None = None) -> list[dict[str, Any]]:
+        """Query the graph store.
+
+        Args:
+            query (str): The query to be executed.
+            parameters (dict[str, Any] | None, optional): The parameters of the query. Defaults to None.
+
+        Returns:
+            list[dict[str, Any]]: The result of the query.
+        """
+    async def traverse_graph(self, node_properties: dict[str, Any], extracted_node_properties: list[str] | None = None, extracted_relationship_properties: list[str] | None = None, depth: int = 3) -> tuple[list[dict[str, Any]], list[dict[str, Any]]]:
+        '''Traverse graph from a node with specified properties, ignoring relationship\'s direction, up to a given depth.
+
+        Example:
+            ```python
+            nodes, relationships = await graph_data_store.traverse_graph(
+                node_properties={"name": "John Doe"},
+                extracted_node_properties=["name", "age"],
+                extracted_relationship_properties=["since"],
+                depth=1
+            )
+            ```
+            Means starting from the node with property `name` equal to "John Doe", traverse
+            the graph up to depth 1, extracting the `name` and `age` properties from nodes
+            and the `since` property from relationships.
+
+            ```python
+            nodes, relationships = await graph_data_store.traverse_graph(
+                node_properties={"name": "John Doe"},
+                depth=2
+            )
+            ```
+            Means starting from the node with property `name` equal to "John Doe", traverse
+            the graph up to depth 2, extracting all properties from nodes and relationships.
+
+        Args:
+            node_properties (dict[str, Any]): The properties of the starting node.
+            extracted_node_properties (list[str] | None, optional): The properties to extract from nodes during
+                traversal. If None or empty list, all node properties will be returned. Defaults to None.
+            extracted_relationship_properties (list[str] | None, optional): The properties to extract from relationships
+                during traversal. If None or empty list, all relationship properties will be returned. Defaults to None.
+            depth (int, optional): The depth of traversal. Defaults to 3.
+
+        Returns:
+            tuple[list[dict[str, Any]], list[dict[str, Any]]]: A tuple containing two lists:
+                - List of nodes with their extracted properties (including the source node).
+                - List of relationships with their extracted properties.
+
+            Example return value:
+                nodes = [
+                    {
+                        "id": 1001,
+                        "labels": ["Person"],
+                        "properties": {
+                            "name": "John Doe",
+                            "age": 30,
+                            "occupation": "Engineer"
+                        }
+                    },
+                    {
+                        "id": 2001,
+                        "labels": ["Company"],
+                        "properties": {
+                            "name": "TechCorp",
+                            "industry": "Technology",
+                            "employees": 500
+                        }
+                    }
+                ]
+
+                relationships = [
+                    {
+                        "id": 5002,
+                        "type": "FRIEND_OF",
+                        "start_node": 1001,
+                        "end_node": 1002,
+                        "properties": {
+                            "since": "2018-05-20",
+                            "closeness": 8
+                        }
+                    }
+                ]
+
+        Raises:
+            ValueError: If node_properties is empty or depth is less than 1.
+        '''
+    async def close(self) -> None:
+        """Close the graph data store."""
+    async def get_nodes(self, label: str | None = None) -> list[dict[str, Any]]:
+        """Get all nodes with optional label filter.
+
+        Args:
+            label (str | None, optional): The label of the nodes. Defaults to None.
+
+        Returns:
+            list[dict[str, Any]]: The result of the query.
+        """
+    async def get_relationships(self, source_value: str | None = None, relation: str | None = None) -> list[dict[str, Any]]:
+        """Get relationships with optional filters.
+
+        Args:
+            source_value (str | None, optional): The source vertex identifier. Defaults to None.
+            relation (str | None, optional): The relationship type. Defaults to None.
+
+        Returns:
+            list[dict[str, Any]]: The result of the query.
+        """
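For orientation, here is a hedged end-to-end sketch built only from the signatures above; the connection values, space name, labels, relationship type, and property values are placeholders, not package defaults.

```python
# Hedged usage sketch for NebulaGraphDataStore; all values below are placeholders.
import asyncio

from gllm_datastore.graph_data_store.nebula_graph_data_store import NebulaGraphDataStore


async def main() -> None:
    store = NebulaGraphDataStore(
        url="127.0.0.1", port=9669, user="root", password="nebula", space="testing"
    )
    try:
        # Upsert two nodes and connect them with a relationship.
        await store.upsert_node("Person", "name", "John", {"age": 30})
        await store.upsert_node("Company", "name", "TechCorp", {"industry": "Technology"})
        await store.upsert_relationship("name", "John", "WORKS_AT", "name", "TechCorp", {"since": 2020})

        # Undirected expansion up to 2 hops from John, returning all properties.
        nodes, relationships = await store.traverse_graph({"name": "John"}, depth=2)
        print(len(nodes), len(relationships))
    finally:
        await store.close()


asyncio.run(main())
```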
gllm_datastore/graph_data_store/neo4j_graph_data_store.pyi
@@ -0,0 +1,182 @@
+from _typeshed import Incomplete
+from gllm_core.utils.retry import RetryConfig
+from gllm_datastore.graph_data_store.graph_data_store import BaseGraphDataStore as BaseGraphDataStore
+from typing import Any
+
+class Neo4jGraphDataStore(BaseGraphDataStore):
+    '''Implementation of BaseGraphDataStore for Neo4j.
+
+    This class provides an interface for graph-based Retrieval-Augmented Generation (RAG)
+    operations on Neo4j graph databases.
+
+    Attributes:
+        driver (Driver): The Neo4j driver.
+
+    Example:
+        ```python
+        store = Neo4jGraphDataStore(
+            uri="bolt://localhost:7687",
+            user="neo4j",
+            password="password"
+        )
+        # Perform async operations
+        results = await store.query("MATCH (n) RETURN n")
+
+        # Create a node
+        node = await store.upsert_node("Person", "name", "John", {"age": 30})
+        ```
+    '''
+    driver: Incomplete
+    retry_config: Incomplete
+    def __init__(self, uri: str, user: str, password: str, max_connection_pool_size: int = 100, retry_config: RetryConfig | None = None, **kwargs: Any) -> None:
+        """Initialize Neo4jGraphDataStore.
+
+        Args:
+            uri (str): The URI of the graph store.
+            user (str): The user of the graph store.
+            password (str): The password of the graph store.
+            max_connection_pool_size (int, optional): The maximum size of the connection pool. Defaults to 100.
+            retry_config (RetryConfig | None, optional): Configuration for retry behavior. Defaults to None.
+                If provided, query operations will be retried according to the specified RetryConfig parameters.
+                When a database operation fails with a retryable exception (e.g., neo4j.exceptions.ServiceUnavailable),
+                the operation will be automatically retried based on the retry policy defined in the configuration.
+            **kwargs (Any): Additional keyword arguments for the driver.
+        """
+    async def upsert_node(self, label: str, identifier_key: str, identifier_value: str, properties: dict[str, Any] | None = None) -> Any:
+        """Upsert a node in the graph.
+
+        Args:
+            label (str): The label of the node.
+            identifier_key (str): The key of the identifier.
+            identifier_value (str): The value of the identifier.
+            properties (dict[str, Any] | None, optional): The properties of the node. Defaults to None.
+
+        Returns:
+            Any: The result of the operation.
+        """
+    async def upsert_relationship(self, node_source_key: str, node_source_value: str, relation: str, node_target_key: str, node_target_value: str, properties: dict[str, Any] | None = None) -> Any:
+        """Upsert a relationship between two nodes in the graph.
+
+        Args:
+            node_source_key (str): The key of the source node.
+            node_source_value (str): The value of the source node.
+            relation (str): The type of the relationship.
+            node_target_key (str): The key of the target node.
+            node_target_value (str): The value of the target node.
+            properties (dict[str, Any] | None, optional): The properties of the relationship. Defaults to None.
+
+        Returns:
+            Any: The result of the operation.
+        """
+    async def delete_node(self, label: str, identifier_key: str, identifier_value: str) -> Any:
+        """Delete a node from the graph.
+
+        Args:
+            label (str): The label of the node.
+            identifier_key (str): The key of the identifier.
+            identifier_value (str): The identifier of the node.
+
+        Returns:
+            Any: The result of the operation.
+        """
+    async def delete_relationship(self, node_source_key: str, node_source_value: str, relation: str, node_target_key: str, node_target_value: str) -> Any:
+        """Delete a relationship between two nodes in the graph.
+
+        Args:
+            node_source_key (str): The key of the source node.
+            node_source_value (str): The identifier of the source node.
+            relation (str): The type of the relationship.
+            node_target_key (str): The key of the target node.
+            node_target_value (str): The identifier of the target node.
+
+        Returns:
+            Any: The result of the operation.
+        """
+    async def query(self, query: str, parameters: dict[str, Any] | None = None) -> list[dict[str, Any]]:
+        """Query the graph store.
+
+        Args:
+            query (str): The query to be executed.
+            parameters (dict[str, Any] | None, optional): The parameters of the query. Defaults to None.
+
+        Returns:
+            list[dict[str, Any]]: The result of the query.
+        """
+    async def traverse_graph(self, node_properties: dict[str, Any], extracted_node_properties: list[str] | None = None, extracted_relationship_properties: list[str] | None = None, depth: int = 3) -> tuple[list[dict[str, Any]], list[dict[str, Any]]]:
+        '''Traverse graph from a node with specified properties, ignoring relationship\'s direction, up to a given depth.
+
+        Example:
+            ```python
+            nodes, relationships = await graph_data_store.traverse_graph(
+                node_properties={"name": "John Doe"},
+                extracted_node_properties=["name", "age"],
+                extracted_relationship_properties=["since"],
+                depth=1
+            )
+            ```
+            Means starting from the node with property `name` equal to "John Doe", traverse
+            the graph up to depth 1, extracting the `name` and `age` properties from nodes
+            and the `since` property from relationships.
+
+            ```python
+            nodes, relationships = await graph_data_store.traverse_graph(
+                node_properties={"name": "John Doe"},
+                depth=2
+            )
+            ```
+            Means starting from the node with property `name` equal to "John Doe", traverse
+            the graph up to depth 2, extracting all properties from nodes and relationships.
+
+        Args:
+            node_properties (dict[str, Any]): The properties of the starting node.
+            extracted_node_properties (list[str] | None, optional): The properties to extract from nodes during
+                traversal. If None or empty list, all node properties will be returned. Defaults to None.
+            extracted_relationship_properties (list[str] | None, optional): The properties to extract from relationships
+                during traversal. If None or empty list, all relationship properties will be returned. Defaults to None.
+            depth (int, optional): The depth of traversal. Defaults to 3.
+
+        Returns:
+            tuple[list[dict[str, Any]], list[dict[str, Any]]]: A tuple containing two lists:
+                - List of nodes with their extracted properties (including the source node).
+                - List of relationships with their extracted properties.
+
+            Example return value:
+                nodes = [
+                    {
+                        "id": 1001,
+                        "labels": ["Person"],
+                        "properties": {
+                            "name": "John Doe",
+                            "age": 30,
+                            "occupation": "Engineer"
+                        }
+                    },
+                    {
+                        "id": 2001,
+                        "labels": ["Company"],
+                        "properties": {
+                            "name": "TechCorp",
+                            "industry": "Technology",
+                            "employees": 500
+                        }
+                    }
+                ]
+
+                relationships = [
+                    {
+                        "id": 5002,
+                        "type": "FRIEND_OF",
+                        "start_node": 1001,
+                        "end_node": 1002,
+                        "properties": {
+                            "since": "2018-05-20",
+                            "closeness": 8
+                        }
+                    }
+                ]
+
+        Raises:
+            ValueError: If node_properties is empty or depth is less than 1.
+        '''
+    async def close(self) -> None:
+        """Close the graph data store."""
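Since the retry behavior is only described in prose in the docstring above, here is a hedged sketch of wiring it in; the RetryConfig constructor arguments are not shown in these stubs, so default construction is assumed, and the connection values and query are placeholders.

```python
# Hedged sketch: Neo4jGraphDataStore with a retry policy. RetryConfig options are
# not visible in these stubs, so the default construction is an assumption.
import asyncio

from gllm_core.utils.retry import RetryConfig
from gllm_datastore.graph_data_store.neo4j_graph_data_store import Neo4jGraphDataStore


async def main() -> None:
    store = Neo4jGraphDataStore(
        uri="bolt://localhost:7687",
        user="neo4j",
        password="password",
        max_connection_pool_size=50,
        retry_config=RetryConfig(),  # queries retried on retryable errors (e.g. ServiceUnavailable)
    )
    try:
        rows = await store.query(
            "MATCH (p:Person {name: $name}) RETURN p", parameters={"name": "John Doe"}
        )
        print(len(rows))
    finally:
        await store.close()


asyncio.run(main())
```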
gllm_datastore/graph_data_store/schema.pyi
@@ -0,0 +1,27 @@
+from enum import StrEnum
+from gllm_core.schema.graph import Edge, Node
+from pydantic import BaseModel
+
+class RelationshipDirection(StrEnum):
+    """Direction for relationship traversal."""
+    OUTGOING: str
+    INCOMING: str
+    BOTH: str
+
+class SearchPosition(StrEnum):
+    """Position in a graph pattern for constrained search."""
+    SOURCE: str
+    RELATIONSHIP: str
+    TARGET: str
+
+class Triplet(BaseModel):
+    """Graph triplet pattern (source-relationship-target).
+
+    Attributes:
+        source (Node): Source node.
+        relationship (Edge): A directed relationship from source to target.
+        target (Node): Target node.
+    """
+    source: Node
+    relationship: Edge
+    target: Node
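These enums and the Triplet model are what the agentic mixin methods accept and return. A hedged sketch of a constrained search, assuming `store` implements AgenticGraphToolsMixin and that a WORKS_AT relationship type exists in the graph:

```python
# Hedged sketch: searching the TARGET slot of a constrained pattern.
from gllm_datastore.graph_data_store.mixins.agentic_graph_tools_mixin import AgenticGraphToolsMixin
from gllm_datastore.graph_data_store.schema import SearchPosition


async def find_employers(store: AgenticGraphToolsMixin) -> list:
    # With position=TARGET the stub declares list[Node]; a RELATIONSHIP search
    # would instead return Triplet objects (source, relationship, target).
    return await store.search_constrained(
        query="tech",
        position=SearchPosition.TARGET,
        relationship_type="WORKS_AT",  # assumed example relationship type
        limit=5,
    )
```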
gllm_datastore/graph_data_store/utils/__init__.pyi
@@ -0,0 +1,6 @@
+from gllm_datastore.graph_data_store.utils.light_rag_em_invoker_adapter import LightRAGEMInvokerAdapter as LightRAGEMInvokerAdapter
+from gllm_datastore.graph_data_store.utils.light_rag_lm_invoker_adapter import LightRAGLMInvokerAdapter as LightRAGLMInvokerAdapter
+from gllm_datastore.graph_data_store.utils.llama_index_em_invoker_adapter import LlamaIndexEMInvokerAdapter as LlamaIndexEMInvokerAdapter
+from gllm_datastore.graph_data_store.utils.llama_index_lm_invoker_adapter import LlamaIndexLMInvokerAdapter as LlamaIndexLMInvokerAdapter
+
+__all__ = ['LightRAGEMInvokerAdapter', 'LightRAGLMInvokerAdapter', 'LlamaIndexEMInvokerAdapter', 'LlamaIndexLMInvokerAdapter']
gllm_datastore/graph_data_store/utils/constants.pyi
@@ -0,0 +1,21 @@
+class LightRAGKeys:
+    """Keys used in LightRAG indexer."""
+    ENTITY_TYPE: str
+    ENTITY_ID: str
+    SOURCE_ID: str
+    ROLE: str
+    CONTENT: str
+
+class LightRAGConstants:
+    """Constants used in LightRAG indexer."""
+    CHUNK_TYPE: str
+    DEVELOPER_ROLE: str
+    EMBEDDING_PAYLOAD_TEST: str
+    FILE_TYPE: str
+
+class LightRAGPostgresStorageConstants:
+    """Constants used in LightRAG indexer with PostgreSQL storage."""
+    DOC_STATUS_STORAGE: str
+    GRAPH_STORAGE: str
+    KV_STORAGE: str
+    VECTOR_STORAGE: str
gllm_datastore/graph_data_store/utils/light_rag_em_invoker_adapter.pyi
@@ -0,0 +1,56 @@
+from _typeshed import Incomplete
+from gllm_datastore.graph_data_store.utils.constants import LightRAGConstants as LightRAGConstants
+from gllm_inference.em_invoker.em_invoker import BaseEMInvoker
+from lightrag.base import EmbeddingFunc
+
+class LightRAGEMInvokerAdapter(EmbeddingFunc):
+    """Adapter for embedding model invokers to work with LightRAG.
+
+    This adapter wraps BaseEMInvoker instances to make them compatible
+    with LightRAG's expected interface.
+
+    Attributes:
+        _em_invoker (BaseEMInvoker): The EM invoker to use.
+        func (callable): The embedding function.
+        embedding_dim (int): The embedding dimension. Defaults to 0.
+    """
+    func: Incomplete
+    embedding_dim: int
+    def __init__(self, em_invoker: BaseEMInvoker) -> None:
+        """Initialize the LightRAGEMInvokerAdapter.
+
+        Args:
+            em_invoker (BaseEMInvoker): The EM invoker to use.
+        """
+    async def ensure_initialized(self) -> None:
+        """Ensure that the adapter is initialized.
+
+        This asynchronous method ensures that the embedding dimension is determined.
+        If the embedding dimension is 0, it will determine the dimension by calling
+        the embedding invoker with a test input. Raises an error if initialization fails.
+
+        Raises:
+            RuntimeError: If embedding dimension cannot be determined after initialization.
+        """
+    def __deepcopy__(self, memo: dict) -> LightRAGEMInvokerAdapter:
+        """Custom deepcopy implementation to handle non-serializable objects.
+
+        This method is called when copy.deepcopy() is invoked on this object.
+        We create a new instance without deep-copying the invoker object
+        which may contain non-serializable components.
+
+        Args:
+            memo (dict): Memoization dictionary for deepcopy process
+
+        Returns:
+            LightRAGEMInvokerAdapter: A new instance with the same invoker reference
+        """
+    async def __call__(self, input: str | list[str]) -> list[list[float]]:
+        """Make the adapter callable for compatibility with LightRAG.
+
+        Args:
+            input (str | list[str]): The input text or list of texts to embed.
+
+        Returns:
+            list[list[float]]: The embeddings for the input texts.
+        """
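As a hedged sketch of how this adapter is meant to be used (the concrete BaseEMInvoker construction lives in gllm_inference and is not shown in these stubs, so it is passed in from outside here):

```python
# Hedged sketch: wrapping an existing embedding invoker for LightRAG.
from gllm_datastore.graph_data_store.utils import LightRAGEMInvokerAdapter
from gllm_inference.em_invoker.em_invoker import BaseEMInvoker


async def embed_for_lightrag(em_invoker: BaseEMInvoker) -> list[list[float]]:
    adapter = LightRAGEMInvokerAdapter(em_invoker)
    await adapter.ensure_initialized()  # probes the model once to fill embedding_dim
    # The adapter itself is the EmbeddingFunc-style callable that LightRAG expects.
    return await adapter(["hello", "world"])
```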