gllm-datastore-binary 0.5.45__cp311-cp311-macosx_13_0_arm64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of gllm-datastore-binary might be problematic. See the package's advisory page for more details.
- gllm_datastore/__init__.pyi +0 -0
- gllm_datastore/cache/__init__.pyi +4 -0
- gllm_datastore/cache/base.pyi +84 -0
- gllm_datastore/cache/cache.pyi +137 -0
- gllm_datastore/cache/hybrid_cache/__init__.pyi +5 -0
- gllm_datastore/cache/hybrid_cache/file_system_hybrid_cache.pyi +50 -0
- gllm_datastore/cache/hybrid_cache/hybrid_cache.pyi +115 -0
- gllm_datastore/cache/hybrid_cache/in_memory_hybrid_cache.pyi +29 -0
- gllm_datastore/cache/hybrid_cache/key_matcher/__init__.pyi +5 -0
- gllm_datastore/cache/hybrid_cache/key_matcher/exact_key_matcher.pyi +44 -0
- gllm_datastore/cache/hybrid_cache/key_matcher/fuzzy_key_matcher.pyi +70 -0
- gllm_datastore/cache/hybrid_cache/key_matcher/key_matcher.pyi +60 -0
- gllm_datastore/cache/hybrid_cache/key_matcher/semantic_key_matcher.pyi +93 -0
- gllm_datastore/cache/hybrid_cache/redis_hybrid_cache.pyi +34 -0
- gllm_datastore/cache/hybrid_cache/utils.pyi +36 -0
- gllm_datastore/cache/utils.pyi +34 -0
- gllm_datastore/cache/vector_cache/__init__.pyi +0 -0
- gllm_datastore/cache/vector_cache/eviction_manager/__init__.pyi +0 -0
- gllm_datastore/cache/vector_cache/eviction_manager/asyncio_eviction_manager.pyi +48 -0
- gllm_datastore/cache/vector_cache/eviction_manager/eviction_manager.pyi +38 -0
- gllm_datastore/cache/vector_cache/eviction_strategy/__init__.pyi +0 -0
- gllm_datastore/cache/vector_cache/eviction_strategy/eviction_strategy.pyi +34 -0
- gllm_datastore/cache/vector_cache/eviction_strategy/ttl_eviction_strategy.pyi +34 -0
- gllm_datastore/cache/vector_cache/vector_cache.pyi +99 -0
- gllm_datastore/constants.pyi +66 -0
- gllm_datastore/core/__init__.pyi +7 -0
- gllm_datastore/core/capabilities/__init__.pyi +5 -0
- gllm_datastore/core/capabilities/fulltext_capability.pyi +73 -0
- gllm_datastore/core/capabilities/graph_capability.pyi +70 -0
- gllm_datastore/core/capabilities/vector_capability.pyi +90 -0
- gllm_datastore/core/filters/__init__.pyi +4 -0
- gllm_datastore/core/filters/filter.pyi +340 -0
- gllm_datastore/core/filters/schema.pyi +149 -0
- gllm_datastore/data_store/__init__.pyi +7 -0
- gllm_datastore/data_store/base.pyi +138 -0
- gllm_datastore/data_store/chroma/__init__.pyi +4 -0
- gllm_datastore/data_store/chroma/_chroma_import.pyi +13 -0
- gllm_datastore/data_store/chroma/data_store.pyi +202 -0
- gllm_datastore/data_store/chroma/fulltext.pyi +134 -0
- gllm_datastore/data_store/chroma/query.pyi +266 -0
- gllm_datastore/data_store/chroma/query_translator.pyi +41 -0
- gllm_datastore/data_store/chroma/vector.pyi +197 -0
- gllm_datastore/data_store/elasticsearch/__init__.pyi +5 -0
- gllm_datastore/data_store/elasticsearch/data_store.pyi +119 -0
- gllm_datastore/data_store/elasticsearch/fulltext.pyi +237 -0
- gllm_datastore/data_store/elasticsearch/query.pyi +114 -0
- gllm_datastore/data_store/elasticsearch/vector.pyi +179 -0
- gllm_datastore/data_store/exceptions.pyi +35 -0
- gllm_datastore/data_store/in_memory/__init__.pyi +5 -0
- gllm_datastore/data_store/in_memory/data_store.pyi +71 -0
- gllm_datastore/data_store/in_memory/fulltext.pyi +131 -0
- gllm_datastore/data_store/in_memory/query.pyi +175 -0
- gllm_datastore/data_store/in_memory/vector.pyi +174 -0
- gllm_datastore/data_store/redis/__init__.pyi +5 -0
- gllm_datastore/data_store/redis/data_store.pyi +154 -0
- gllm_datastore/data_store/redis/fulltext.pyi +128 -0
- gllm_datastore/data_store/redis/query.pyi +428 -0
- gllm_datastore/data_store/redis/query_translator.pyi +37 -0
- gllm_datastore/data_store/redis/vector.pyi +131 -0
- gllm_datastore/encryptor/__init__.pyi +4 -0
- gllm_datastore/encryptor/aes_gcm_encryptor.pyi +45 -0
- gllm_datastore/encryptor/encryptor.pyi +52 -0
- gllm_datastore/encryptor/key_ring/__init__.pyi +3 -0
- gllm_datastore/encryptor/key_ring/in_memory_key_ring.pyi +52 -0
- gllm_datastore/encryptor/key_ring/key_ring.pyi +45 -0
- gllm_datastore/encryptor/key_rotating_encryptor.pyi +60 -0
- gllm_datastore/graph_data_store/__init__.pyi +6 -0
- gllm_datastore/graph_data_store/graph_data_store.pyi +151 -0
- gllm_datastore/graph_data_store/graph_rag_data_store.pyi +29 -0
- gllm_datastore/graph_data_store/light_rag_data_store.pyi +93 -0
- gllm_datastore/graph_data_store/light_rag_postgres_data_store.pyi +96 -0
- gllm_datastore/graph_data_store/llama_index_graph_rag_data_store.pyi +49 -0
- gllm_datastore/graph_data_store/llama_index_neo4j_graph_rag_data_store.pyi +78 -0
- gllm_datastore/graph_data_store/nebula_graph_data_store.pyi +206 -0
- gllm_datastore/graph_data_store/neo4j_graph_data_store.pyi +182 -0
- gllm_datastore/graph_data_store/utils/__init__.pyi +6 -0
- gllm_datastore/graph_data_store/utils/constants.pyi +21 -0
- gllm_datastore/graph_data_store/utils/light_rag_em_invoker_adapter.pyi +56 -0
- gllm_datastore/graph_data_store/utils/light_rag_lm_invoker_adapter.pyi +43 -0
- gllm_datastore/graph_data_store/utils/llama_index_em_invoker_adapter.pyi +45 -0
- gllm_datastore/graph_data_store/utils/llama_index_lm_invoker_adapter.pyi +169 -0
- gllm_datastore/sql_data_store/__init__.pyi +4 -0
- gllm_datastore/sql_data_store/adapter/__init__.pyi +0 -0
- gllm_datastore/sql_data_store/adapter/sqlalchemy_adapter.pyi +38 -0
- gllm_datastore/sql_data_store/constants.pyi +6 -0
- gllm_datastore/sql_data_store/sql_data_store.pyi +86 -0
- gllm_datastore/sql_data_store/sqlalchemy_sql_data_store.pyi +216 -0
- gllm_datastore/sql_data_store/types.pyi +31 -0
- gllm_datastore/utils/__init__.pyi +6 -0
- gllm_datastore/utils/converter.pyi +51 -0
- gllm_datastore/utils/dict.pyi +21 -0
- gllm_datastore/utils/ttl.pyi +25 -0
- gllm_datastore/utils/types.pyi +32 -0
- gllm_datastore/vector_data_store/__init__.pyi +6 -0
- gllm_datastore/vector_data_store/chroma_vector_data_store.pyi +259 -0
- gllm_datastore/vector_data_store/elasticsearch_vector_data_store.pyi +357 -0
- gllm_datastore/vector_data_store/in_memory_vector_data_store.pyi +179 -0
- gllm_datastore/vector_data_store/mixin/__init__.pyi +0 -0
- gllm_datastore/vector_data_store/mixin/cache_compatible_mixin.pyi +145 -0
- gllm_datastore/vector_data_store/redis_vector_data_store.pyi +191 -0
- gllm_datastore/vector_data_store/vector_data_store.pyi +146 -0
- gllm_datastore.build/.gitignore +1 -0
- gllm_datastore.cpython-311-darwin.so +0 -0
- gllm_datastore.pyi +156 -0
- gllm_datastore_binary-0.5.45.dist-info/METADATA +178 -0
- gllm_datastore_binary-0.5.45.dist-info/RECORD +108 -0
- gllm_datastore_binary-0.5.45.dist-info/WHEEL +5 -0
- gllm_datastore_binary-0.5.45.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,93 @@
|
|
|
1
|
+
from _typeshed import Incomplete
|
|
2
|
+
from gllm_core.schema import Chunk as Chunk
|
|
3
|
+
from gllm_datastore.graph_data_store.graph_rag_data_store import BaseGraphRAGDataStore as BaseGraphRAGDataStore
|
|
4
|
+
from gllm_datastore.graph_data_store.utils.constants import LightRAGConstants as LightRAGConstants, LightRAGKeys as LightRAGKeys
|
|
5
|
+
from gllm_datastore.graph_data_store.utils.light_rag_em_invoker_adapter import LightRAGEMInvokerAdapter as LightRAGEMInvokerAdapter
|
|
6
|
+
from lightrag import LightRAG
|
|
7
|
+
from typing import Any
|
|
8
|
+
|
|
9
|
+
class BaseLightRAGDataStore(BaseGraphRAGDataStore):
    """LightRAG data store base class.

    This class provides an abstract base class for the BaseGraphRAGDataStore interface
    using LightRAG as the underlying technology. It handles indexing files
    into a graph database, creating relationships between files and chunks,
    and provides methods for deleting files and chunks from the graph.

    Please use LightRAGPostgresDataStore or other concrete implementations instead.

    To implement a concrete data store, inherit from this class and implement the
    abstract methods.

    Attributes:
        instance (LightRAG): The LightRAG instance to use.
        is_initialized (bool): Whether the data store is initialized.
    """

    instance: Incomplete
    is_initialized: bool

    def __init__(self, instance: LightRAG) -> None:
        """Initialize the LightRAG data store.

        This is an abstract base class and cannot be instantiated directly.
        Use LightRAGPostgresDataStore or other concrete implementations instead.

        Args:
            instance (LightRAG): The LightRAG instance to use for indexing.

        Raises:
            TypeError: If attempting to instantiate BaseLightRAGDataStore directly.
        """

    async def ensure_initialized(self) -> None:
        """Ensure that the LightRAG data store is initialized.

        This asynchronous method ensures that the LightRAG data store is initialized.
        If the data store is not initialized, it will initialize it.
        """

    async def map_file_id_to_chunk_ids_using_graph(self, file_id: str, chunk_ids: list[str]) -> None:
        """Create file and chunk nodes in the graph and establish relationships.

        This asynchronous method creates a file node and multiple chunk nodes
        in the graph database, then establishes relationships between the file
        and its chunks. The relationships are necessary for retaining the relationships
        between the file and its chunks when deleting the file.

        Args:
            file_id (str): The ID of the file to create in the graph.
            chunk_ids (list[str]): List of chunk IDs that belong to the file.
        """

    async def insert(self, chunks: list[Chunk]) -> None:
        """Insert chunks into the LightRAG data store.

        This asynchronous method inserts the given chunks into the LightRAG
        data store. If the data store is not initialized, it will initialize it.

        Args:
            chunks (list[Chunk]): The chunks to insert.
        """

    async def query(self, query: str, **kwargs: Any) -> Any:
        """Query the LightRAG data store.

        Args:
            query (str): The query to be executed.
            **kwargs (Any): Additional keyword arguments.

        Returns:
            Any: The result of the query.
        """

    async def delete(self, chunk_id: str) -> None:
        """Delete a chunk from the LightRAG data store.

        Args:
            chunk_id (str): The ID of the chunk to delete.
        """

    async def delete_by_document_id(self, document_id: str, **kwargs: Any) -> None:
        """Delete a document/file and all its associated chunks from the LightRAG data store.

        This asynchronous method retrieves all chunks associated with a document/file,
        deletes each chunk from both the LightRAG system and the graph database,
        and finally deletes the document/file node itself.

        Args:
            document_id (str): The ID of the document to delete.
            **kwargs (Any): Additional keyword arguments.
        """
|
|
@@ -0,0 +1,96 @@
|
|
|
1
|
+
from _typeshed import Incomplete
|
|
2
|
+
from contextlib import contextmanager
|
|
3
|
+
from gllm_datastore.graph_data_store.light_rag_data_store import BaseLightRAGDataStore as BaseLightRAGDataStore
|
|
4
|
+
from gllm_datastore.graph_data_store.utils.constants import LightRAGPostgresStorageConstants as LightRAGPostgresStorageConstants
|
|
5
|
+
from gllm_datastore.graph_data_store.utils.light_rag_em_invoker_adapter import LightRAGEMInvokerAdapter as LightRAGEMInvokerAdapter
|
|
6
|
+
from gllm_datastore.graph_data_store.utils.light_rag_lm_invoker_adapter import LightRAGLMInvokerAdapter as LightRAGLMInvokerAdapter
|
|
7
|
+
from gllm_inference.em_invoker.em_invoker import BaseEMInvoker
|
|
8
|
+
from gllm_inference.lm_invoker.lm_invoker import BaseLMInvoker
|
|
9
|
+
from lightrag import LightRAG
|
|
10
|
+
from pydantic import BaseModel
|
|
11
|
+
from typing import Any, Generator
|
|
12
|
+
|
|
13
|
+
class PostgresDBConfig(BaseModel):
    """PostgreSQL connection settings, modeled as a pydantic configuration object."""

    host: str
    port: int
    user: str
    password: str
    database: str
    workspace: str
|
|
21
|
+
|
|
22
|
+
@contextmanager
def postgres_config_context(config: PostgresDBConfig) -> Generator[None, None, None]:
    """Context manager to temporarily set postgres config.

    This context manager temporarily sets the environment variables for the
    PostgreSQL configuration, since LightRAG reads its connection settings from
    the environment. The variables are in place for the duration of the context
    manager's block (presumably restored on exit — confirm against the
    implementation).

    Args:
        config (PostgresDBConfig): Pydantic model containing PostgreSQL configuration parameters.

    Yields:
        None: Control is yielded while the configuration environment variables are set.
    """
|
|
36
|
+
|
|
37
|
+
class LightRAGPostgresDataStore(BaseLightRAGDataStore):
    '''Data store implementation for LightRAG-based graph RAG using PostgreSQL.

    This class extends the LightRAGDataStore to use PostgreSQL as the graph database,
    key-value store, and vector database.

    To use this data store, please ensure that you have a PostgreSQL with AGE and PGVector extensions installed.
    You can use the following docker run command to start a PostgreSQL container with AGE and PGVector extensions:

    ```bash
    docker run -p 5455:5432 -d --name postgres-LightRag shangor/postgres-for-rag:v1.0 sh -c "service postgresql start && sleep infinity"
    ```

    Example:
        ```python
        from gllm_inference.em_invoker import OpenAIEMInvoker
        from gllm_inference.lm_invoker import OpenAILMInvoker
        from gllm_datastore.graph_data_store.light_rag_postgres_data_store import LightRAGPostgresDataStore

        # Create the data store
        data_store = LightRAGPostgresDataStore(
            lm_invoker=OpenAILMInvoker(model_name="gpt-4o-mini"),
            em_invoker=OpenAIEMInvoker(model_name="text-embedding-3-small"),
            postgres_db_user="rag",
            postgres_db_password="rag",
            postgres_db_name="rag",
            postgres_db_host="localhost",
            postgres_db_port=5455,
        )

        # Retrieve using LightRAG instance
        await data_store.query("What is AI?")
        ```

    Attributes:
        instance (LightRAG): The LightRAG instance used for indexing and querying.
        lm_invoker_adapter (LightRAGLMInvokerAdapter): The adapter for the LM invoker.
        em_invoker_adapter (LightRAGEMInvokerAdapter): The adapter for the EM invoker.
        postgres_config (PostgresDBConfig): Pydantic model containing PostgreSQL configuration parameters.
    '''

    lm_invoker_adapter: Incomplete
    em_invoker_adapter: Incomplete
    postgres_config: Incomplete

    def __init__(self, lm_invoker: BaseLMInvoker, em_invoker: BaseEMInvoker, postgres_db_host: str = 'localhost', postgres_db_port: int = 5432, postgres_db_user: str = 'postgres', postgres_db_password: str = 'password', postgres_db_name: str = 'postgres', postgres_db_workspace: str = 'default', use_cache: bool = False, lm_invoke_kwargs: dict[str, Any] | None = None, instance: LightRAG | None = None, **kwargs: Any) -> None:
        '''Initialize the LightRAGPostgresDataStore.

        Args:
            lm_invoker (BaseLMInvoker): The LM invoker to use.
            em_invoker (BaseEMInvoker): The EM invoker to use.
            postgres_db_host (str, optional): The host for the PostgreSQL database. Defaults to "localhost".
            postgres_db_port (int, optional): The port for the PostgreSQL database. Defaults to 5432.
            postgres_db_user (str, optional): The user for the PostgreSQL database. Defaults to "postgres".
            postgres_db_password (str, optional): The password for the PostgreSQL database. Defaults to "password".
            postgres_db_name (str, optional): The name for the PostgreSQL database. Defaults to "postgres".
            postgres_db_workspace (str, optional): The workspace for the PostgreSQL database. Defaults to "default".
            use_cache (bool, optional): Whether to enable caching for the LightRAG instance. Defaults to False.
            lm_invoke_kwargs (dict[str, Any] | None, optional): Keyword arguments for the LM invoker. Defaults to None.
            instance (LightRAG | None, optional): A configured LightRAG instance to use. Defaults to None.
            **kwargs (Any): Additional keyword arguments.
        '''
|
|
@@ -0,0 +1,49 @@
|
|
|
1
|
+
from _typeshed import Incomplete
|
|
2
|
+
from abc import ABC
|
|
3
|
+
from gllm_datastore.graph_data_store.graph_rag_data_store import BaseGraphRAGDataStore as BaseGraphRAGDataStore
|
|
4
|
+
from gllm_datastore.graph_data_store.utils import LlamaIndexEMInvokerAdapter as LlamaIndexEMInvokerAdapter, LlamaIndexLMInvokerAdapter as LlamaIndexLMInvokerAdapter
|
|
5
|
+
from gllm_inference.em_invoker.em_invoker import BaseEMInvoker
|
|
6
|
+
from gllm_inference.lm_invoker.lm_invoker import BaseLMInvoker
|
|
7
|
+
from llama_index.core.base.embeddings.base import BaseEmbedding
|
|
8
|
+
from llama_index.core.graph_stores.types import PropertyGraphStore
|
|
9
|
+
from llama_index.core.llms import LLM
|
|
10
|
+
from typing import Any
|
|
11
|
+
|
|
12
|
+
class LlamaIndexGraphRAGDataStore(PropertyGraphStore, BaseGraphRAGDataStore, ABC):
    """Abstract base for graph RAG data stores backed by LlamaIndex.

    Supplies the shared interface for LlamaIndex-based graph RAG data stores
    and takes care of converting GLLM invokers into LlamaIndex LLM and
    embedding model instances.

    Attributes:
        lm_invoker (BaseLMInvoker | None): The GLLM language model invoker.
        em_invoker (BaseEMInvoker | None): The GLLM embedding model invoker.
        llm (LLM | None): LlamaIndex LLM built from ``lm_invoker`` when one is supplied.
        embed_model (BaseEmbedding | None): LlamaIndex embedding model built from
            ``em_invoker`` when one is supplied.
    """

    lm_invoker: Incomplete
    em_invoker: Incomplete
    llm: LLM | None
    embed_model: BaseEmbedding | None

    def __init__(self, lm_invoker: BaseLMInvoker | None = None, em_invoker: BaseEMInvoker | None = None, **kwargs: Any) -> None:
        """Initialize the LlamaIndexGraphRAGDataStore.

        Args:
            lm_invoker (BaseLMInvoker | None, optional): GLLM language model invoker.
                When given, it is wrapped via LlamaIndexLMInvokerAdapter into a
                LlamaIndex LLM instance. Defaults to None.
            em_invoker (BaseEMInvoker | None, optional): GLLM embedding model invoker.
                When given, it is wrapped via LlamaIndexEMInvokerAdapter into a
                LlamaIndex BaseEmbedding instance. Defaults to None.
            **kwargs (Any): Additional keyword arguments passed to PropertyGraphStore.
        """

    async def query(self, query: str, **kwargs: Any) -> Any:
        """Run a query against the graph RAG data store.

        Args:
            query (str): The query to be executed.
            **kwargs (Any): Additional keyword arguments.

        Returns:
            Any: The result of the query.
        """
|
|
@@ -0,0 +1,78 @@
|
|
|
1
|
+
from _typeshed import Incomplete
|
|
2
|
+
from gllm_datastore.graph_data_store.llama_index_graph_rag_data_store import LlamaIndexGraphRAGDataStore as LlamaIndexGraphRAGDataStore
|
|
3
|
+
from gllm_inference.em_invoker.em_invoker import BaseEMInvoker
|
|
4
|
+
from gllm_inference.lm_invoker.lm_invoker import BaseLMInvoker
|
|
5
|
+
from llama_index.graph_stores.neo4j import Neo4jPropertyGraphStore
|
|
6
|
+
from typing import Any
|
|
7
|
+
|
|
8
|
+
class LlamaIndexNeo4jGraphRAGDataStore(LlamaIndexGraphRAGDataStore, Neo4jPropertyGraphStore):
    '''Neo4j-backed graph RAG data store built on LlamaIndex.

    Extends LlamaIndex's Neo4jPropertyGraphStore to provide graph-based
    Retrieval-Augmented Generation (RAG) operations on Neo4j graph databases.

    Attributes:
        neo4j_version_tuple (tuple[int, ...]): The Neo4j version tuple.
        lm_invoker (BaseLMInvoker | None): GLLM language model invoker (from the parent class).
        em_invoker (BaseEMInvoker | None): GLLM embedding model invoker (from the parent class).
        llm (LLM | None): LlamaIndex LLM converted from ``lm_invoker`` (from the parent class).
        embed_model (BaseEmbedding | None): LlamaIndex embedding model converted from
            ``em_invoker`` (from the parent class).

    Example:
        ```python
        # Option 1: Use with GLLM invokers (recommended)
        from gllm_inference.builder import build_lm_invoker, build_em_invoker

        lm_invoker = build_lm_invoker(model_id="openai/gpt-4o-mini")
        em_invoker = build_em_invoker(model_id="openai/text-embedding-3-small")

        store = LlamaIndexNeo4jGraphRAGDataStore(
            url="bolt://localhost:7687",
            username="neo4j",
            password="password",
            lm_invoker=lm_invoker,  # Optional: Auto-converted to LlamaIndex LLM
            em_invoker=em_invoker,  # Optional: Auto-converted to LlamaIndex Embedding
        )

        # Option 2: Use with LlamaIndex LLM/Embeddings directly
        from llama_index.llms.openai import OpenAI
        from llama_index.embeddings.openai import OpenAIEmbedding

        store = LlamaIndexNeo4jGraphRAGDataStore(
            url="bolt://localhost:7687",
            username="neo4j",
            password="password",
        )

        # Perform RAG query
        results = await store.query("What is the relationship between X and Y?")

        # Delete document data
        await store.delete_by_document_id("doc123")
        ```
    '''

    neo4j_version_tuple: Incomplete

    def __init__(self, url: str, username: str, password: str, lm_invoker: BaseLMInvoker | None = None, em_invoker: BaseEMInvoker | None = None, **kwargs: Any) -> None:
        '''Initialize the LlamaIndexNeo4jGraphRAGDataStore.

        Args:
            url (str): The Neo4j database URL (e.g., "bolt://localhost:7687").
            username (str): The Neo4j database username.
            password (str): The Neo4j database password.
            lm_invoker (BaseLMInvoker | None, optional): GLLM language model invoker; when
                given, the parent class converts it to a LlamaIndex LLM instance.
                Defaults to None.
            em_invoker (BaseEMInvoker | None, optional): GLLM embedding model invoker; when
                given, the parent class converts it to a LlamaIndex BaseEmbedding instance.
                Defaults to None.
            **kwargs (Any): Additional keyword arguments passed to Neo4jPropertyGraphStore.
        '''

    async def delete_by_document_id(self, document_id: str, **kwargs: Any) -> None:
        """Delete nodes and edges associated with a document ID.

        Args:
            document_id (str): The document ID.
            **kwargs (Any): Additional keyword arguments.
        """
|
|
@@ -0,0 +1,206 @@
|
|
|
1
|
+
from _typeshed import Incomplete
|
|
2
|
+
from gllm_datastore.graph_data_store.graph_data_store import BaseGraphDataStore as BaseGraphDataStore
|
|
3
|
+
from typing import Any
|
|
4
|
+
|
|
5
|
+
class NebulaGraphDataStore(BaseGraphDataStore):
    '''Implementation of BaseGraphDataStore for Nebula Graph.

    This class provides an interface for graph-based Retrieval-Augmented Generation (RAG)
    operations on Nebula graph databases.

    Attributes:
        connection_pool (ConnectionPool): The connection pool for Nebula Graph.
        space (str): The space name.
        user (str): The username.
        password (str): The password.
        operation_wait_time (int): The operation wait time in seconds.

    Example:
        ```python
        store = NebulaGraphDataStore(
            url="127.0.0.1",
            port=9669,
            user="root",
            password="nebula",
            space="testing"
        )
        # Perform query
        results = await store.query("MATCH (n) RETURN n")

        # Create a node
        node = await store.upsert_node("Person", "name", "John", {"age": 30})
        ```
    '''

    connection_pool: Incomplete
    space: Incomplete
    user: Incomplete
    password: Incomplete
    operation_wait_time: Incomplete

    def __init__(self, url: str, port: int, user: str, password: str, space: str, operation_wait_time: int = 5) -> None:
        """Initialize NebulaGraphDataStore.

        Args:
            url (str): The URL of the graph store.
            port (int): The port of the graph store.
            user (str): The user of the graph store.
            password (str): The password of the graph store.
            space (str): The space name.
            operation_wait_time (int, optional): The operation wait time in seconds. Defaults to 5.
        """

    async def upsert_node(self, label: str, identifier_key: str, identifier_value: str, properties: dict[str, Any] | None = None) -> Any:
        """Upsert a node in the graph.

        Args:
            label (str): The label of the node.
            identifier_key (str): The key of the identifier.
            identifier_value (str): The value of the identifier.
            properties (dict[str, Any] | None, optional): The properties of the node. Defaults to None.

        Returns:
            Any: The result of the operation.
        """

    async def upsert_relationship(self, node_source_key: str, node_source_value: str, relation: str, node_target_key: str, node_target_value: str, properties: dict[str, Any] | None = None) -> Any:
        """Upsert a relationship between two nodes in the graph.

        Args:
            node_source_key (str): The key of the source node.
            node_source_value (str): The value of the source node.
            relation (str): The type of the relationship.
            node_target_key (str): The key of the target node.
            node_target_value (str): The value of the target node.
            properties (dict[str, Any] | None, optional): The properties of the relationship. Defaults to None.

        Returns:
            Any: The result of the operation.
        """

    async def delete_node(self, label: str, identifier_key: str, identifier_value: str) -> Any:
        """Delete a node from the graph.

        Args:
            label (str): The label of the node.
            identifier_key (str): The key of the identifier.
            identifier_value (str): The identifier of the node.

        Returns:
            Any: The result of the operation.
        """

    async def delete_relationship(self, node_source_key: str, node_source_value: str, relation: str, node_target_key: str, node_target_value: str) -> Any:
        """Delete a relationship between two nodes in the graph.

        Args:
            node_source_key (str): The key of the source node.
            node_source_value (str): The identifier of the source node.
            relation (str): The type of the relationship.
            node_target_key (str): The key of the target node.
            node_target_value (str): The identifier of the target node.

        Returns:
            Any: The result of the operation.
        """

    async def query(self, query: str, parameters: dict[str, Any] | None = None) -> list[dict[str, Any]]:
        """Query the graph store.

        Args:
            query (str): The query to be executed.
            parameters (dict[str, Any] | None, optional): The parameters of the query. Defaults to None.

        Returns:
            list[dict[str, Any]]: The result of the query.
        """

    async def traverse_graph(self, node_properties: dict[str, Any], extracted_node_properties: list[str] | None = None, extracted_relationship_properties: list[str] | None = None, depth: int = 3) -> tuple[list[dict[str, Any]], list[dict[str, Any]]]:
        '''Traverse graph from a node with specified properties, ignoring relationship\'s direction, up to a given depth.

        Example:
            ```python
            nodes, relationships = await graph_data_store.traverse_graph(
                node_properties={"name": "John Doe"},
                extracted_node_properties=["name", "age"],
                extracted_relationship_properties=["since"],
                depth=1
            )
            ```
            Means starting from the node with property `name` equal to "John Doe", traverse
            the graph up to depth 1, extracting the `name` and `age` properties from nodes
            and the `since` property from relationships.

            ```python
            nodes, relationships = await graph_data_store.traverse_graph(
                node_properties={"name": "John Doe"},
                depth=2
            )
            ```
            Means starting from the node with property `name` equal to "John Doe", traverse
            the graph up to depth 2, extracting all properties from nodes and relationships.

        Args:
            node_properties (dict[str, Any]): The properties of the starting node.
            extracted_node_properties (list[str] | None, optional): The properties to extract from nodes during
                traversal. If None or empty list, all node properties will be returned. Defaults to None.
            extracted_relationship_properties (list[str] | None, optional): The properties to extract from relationships
                during traversal. If None or empty list, all relationship properties will be returned. Defaults to None.
            depth (int, optional): The depth of traversal. Defaults to 3.

        Returns:
            tuple[list[dict[str, Any]], list[dict[str, Any]]]: A tuple containing two lists:
                - List of nodes with their extracted properties (including the source node).
                - List of relationships with their extracted properties.

            Example return value:
                nodes = [
                    {
                        "id": 1001,
                        "labels": ["Person"],
                        "properties": {
                            "name": "John Doe",
                            "age": 30,
                            "occupation": "Engineer"
                        }
                    },
                    {
                        "id": 2001,
                        "labels": ["Company"],
                        "properties": {
                            "name": "TechCorp",
                            "industry": "Technology",
                            "employees": 500
                        }
                    }
                ]

                relationships = [
                    {
                        "id": 5002,
                        "type": "FRIEND_OF",
                        "start_node": 1001,
                        "end_node": 1002,
                        "properties": {
                            "since": "2018-05-20",
                            "closeness": 8
                        }
                    }
                ]

        Raises:
            ValueError: If node_properties is empty or depth is less than 1.
        '''

    async def close(self) -> None:
        """Close the graph data store."""

    async def get_nodes(self, label: str | None = None) -> list[dict[str, Any]]:
        """Get all nodes with optional label filter.

        Args:
            label (str | None, optional): The label of the nodes. Defaults to None.

        Returns:
            list[dict[str, Any]]: The result of the query.
        """

    async def get_relationships(self, source_value: str | None = None, relation: str | None = None) -> list[dict[str, Any]]:
        """Get relationships with optional filters.

        Args:
            source_value (str | None, optional): The source vertex identifier. Defaults to None.
            relation (str | None, optional): The relationship type. Defaults to None.

        Returns:
            list[dict[str, Any]]: The result of the query.
        """
|