real-ladybug 0.13.0__cp311-cp311-macosx_11_0_arm64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- real_ladybug/__init__.py +83 -0
- real_ladybug/_lbug.cpython-311-darwin.so +0 -0
- real_ladybug/async_connection.py +226 -0
- real_ladybug/connection.py +323 -0
- real_ladybug/constants.py +7 -0
- real_ladybug/database.py +307 -0
- real_ladybug/prepared_statement.py +51 -0
- real_ladybug/py.typed +0 -0
- real_ladybug/query_result.py +511 -0
- real_ladybug/torch_geometric_feature_store.py +185 -0
- real_ladybug/torch_geometric_graph_store.py +131 -0
- real_ladybug/torch_geometric_result_converter.py +282 -0
- real_ladybug/types.py +39 -0
- real_ladybug-0.13.0.dist-info/METADATA +98 -0
- real_ladybug-0.13.0.dist-info/RECORD +19 -0
- real_ladybug-0.13.0.dist-info/WHEEL +6 -0
- real_ladybug-0.13.0.dist-info/licenses/LICENSE +21 -0
- real_ladybug-0.13.0.dist-info/top_level.txt +1 -0
- real_ladybug-0.13.0.dist-info/zip-safe +1 -0
real_ladybug/__init__.py
ADDED
@@ -0,0 +1,83 @@
"""
# Lbug Python API bindings.

This package provides a Python API for Lbug graph database management system.

To install the package, run:
```
python3 -m pip install real_ladybug
```

Example usage:
```python
import real_ladybug as lb

db = lb.Database("./test")
conn = lb.Connection(db)

# Define the schema
conn.execute("CREATE NODE TABLE User(name STRING, age INT64, PRIMARY KEY (name))")
conn.execute("CREATE NODE TABLE City(name STRING, population INT64, PRIMARY KEY (name))")
conn.execute("CREATE REL TABLE Follows(FROM User TO User, since INT64)")
conn.execute("CREATE REL TABLE LivesIn(FROM User TO City)")

# Load some data
conn.execute('COPY User FROM "user.csv"')
conn.execute('COPY City FROM "city.csv"')
conn.execute('COPY Follows FROM "follows.csv"')
conn.execute('COPY LivesIn FROM "lives-in.csv"')

# Query the data
results = conn.execute("MATCH (u:User) RETURN u.name, u.age;")
while results.has_next():
    print(results.get_next())
```

The dataset used in this example can be found [here](https://github.com/LadybugDB/ladybug/tree/master/dataset/demo-db/csv).

"""

from __future__ import annotations

import os
import sys

# Set RTLD_GLOBAL and RTLD_LAZY flags on Linux to fix the issue with loading
# extensions
if sys.platform == "linux":
    original_dlopen_flags = sys.getdlopenflags()
    sys.setdlopenflags(os.RTLD_GLOBAL | os.RTLD_LAZY)

from .async_connection import AsyncConnection
from .connection import Connection
from .database import Database
from .prepared_statement import PreparedStatement
from .query_result import QueryResult
from .types import Type


def __getattr__(name: str) -> str | int:
    if name in ("version", "__version__"):
        return Database.get_version()
    elif name == "storage_version":
        return Database.get_storage_version()
    else:
        msg = f"module {__name__!r} has no attribute {name!r}"
        raise AttributeError(msg)


# Restore the original dlopen flags
if sys.platform == "linux":
    sys.setdlopenflags(original_dlopen_flags)

__all__ = [
    "AsyncConnection",
    "Connection",
    "Database",
    "PreparedStatement",
    "QueryResult",
    "Type",
    "__version__",  # noqa: F822
    "storage_version",  # noqa: F822
    "version",  # noqa: F822
]
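The module-level `__getattr__` above lazily resolves `version`, `__version__`, and `storage_version` through the `Database` class. A minimal sketch of how a caller would read these attributes, assuming the wheel is installed and importable as `real_ladybug`:

```python
# Minimal sketch: reading the lazily resolved module attributes.
import real_ladybug as lb

print(lb.__version__)      # resolved via __getattr__ -> Database.get_version()
print(lb.storage_version)  # resolved via __getattr__ -> Database.get_storage_version()
```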
real_ladybug/_lbug.cpython-311-darwin.so
ADDED
Binary file
real_ladybug/async_connection.py
ADDED
@@ -0,0 +1,226 @@
from __future__ import annotations

import asyncio
import threading
import warnings
from concurrent.futures import ThreadPoolExecutor
from typing import TYPE_CHECKING, Any

from .connection import Connection
from .prepared_statement import PreparedStatement

if TYPE_CHECKING:
    import sys
    from types import TracebackType

    from .database import Database
    from .query_result import QueryResult

    if sys.version_info >= (3, 11):
        from typing import Self
    else:
        from typing_extensions import Self


class AsyncConnection:
    """AsyncConnection enables asynchronous execution of queries with a pool of connections and threads."""

    def __init__(
        self,
        database: Database,
        max_concurrent_queries: int = 4,
        max_threads_per_query: int = 0,
    ) -> None:
        """
        Initialise the async connection.

        Parameters
        ----------
        database : Database
            Database to connect to.

        max_concurrent_queries : int
            Maximum number of concurrent queries to execute. This corresponds to the
            number of connections and thread pool size. Default is 4.

        max_threads_per_query : int
            Controls the maximum number of threads per connection that can be used
            to execute one query. Default is 0, which means no limit.
        """
        self.database = database
        self.connections = [Connection(database) for _ in range(max_concurrent_queries)]
        self.connections_counter = [0 for _ in range(max_concurrent_queries)]
        self.lock = threading.Lock()

        for conn in self.connections:
            conn.init_connection()
            conn.set_max_threads_for_exec(max_threads_per_query)

        self.executor = ThreadPoolExecutor(max_workers=max_concurrent_queries)

    def __enter__(self) -> Self:
        return self

    def __exit__(
        self,
        exc_type: type[BaseException] | None,
        exc_value: BaseException | None,
        exc_traceback: TracebackType | None,
    ) -> None:
        self.close()

    def __del__(self) -> None:
        self.close()

    def __get_connection_with_least_queries(self) -> tuple[Connection, int]:
        with self.lock:
            conn_index = self.connections_counter.index(min(self.connections_counter))
            self.connections_counter[conn_index] += 1
            return self.connections[conn_index], conn_index

    def __decrement_connection_counter(self, conn_index: int) -> None:
        """Decrement the query counter for a connection."""
        with self.lock:
            self.connections_counter[conn_index] -= 1
            if self.connections_counter[conn_index] < 0:
                self.connections_counter[conn_index] = 0

    def acquire_connection(self) -> Connection:
        """
        Acquire a connection from the connection pool for temporary synchronous
        calls. If the connection pool is oversubscribed, the method will return
        the connection with the least number of queued queries. It is required
        to release the connection by calling `release_connection` after the
        connection is no longer needed.

        Returns
        -------
        Connection
            A connection object.
        """
        conn, _ = self.__get_connection_with_least_queries()
        return conn

    def release_connection(self, conn: Connection) -> None:
        """
        Release a connection acquired by `acquire_connection` back to the
        connection pool. Calling this method is required when the connection is
        no longer needed.

        Parameters
        ----------
        conn : Connection
            Connection object to release.

        """
        for i, existing_conn in enumerate(self.connections):
            if existing_conn == conn:
                self.__decrement_connection_counter(i)
                break

    def set_query_timeout(self, timeout_in_ms: int) -> None:
        """
        Set the query timeout value in ms for executing queries.

        Parameters
        ----------
        timeout_in_ms : int
            Query timeout value in ms for executing queries.

        """
        for conn in self.connections:
            conn.set_query_timeout(timeout_in_ms)

    async def execute(
        self, query: str | PreparedStatement, parameters: dict[str, Any] | None = None
    ) -> QueryResult | list[QueryResult]:
        """
        Execute a query asynchronously.

        Parameters
        ----------
        query : str | PreparedStatement
            A prepared statement or a query string.
            If a query string is given, a prepared statement will be created
            automatically.

        parameters : dict[str, Any]
            Parameters for the query.

        Returns
        -------
        QueryResult
            Query result.

        """
        loop = asyncio.get_running_loop()
        # If the query is a prepared statement, use the connection associated with it
        if isinstance(query, PreparedStatement):
            conn = query._connection
            for i, existing_conn in enumerate(self.connections):
                if existing_conn == conn:
                    conn_index = i
                    with self.lock:
                        self.connections_counter[conn_index] += 1
                    break
        else:
            conn, conn_index = self.__get_connection_with_least_queries()

        try:
            return await loop.run_in_executor(self.executor, conn.execute, query, parameters)
        except asyncio.CancelledError:
            # Interrupt the running query if the awaiting task is cancelled.
            conn.interrupt()
        finally:
            self.__decrement_connection_counter(conn_index)

    async def _prepare(self, query: str, parameters: dict[str, Any] | None = None) -> PreparedStatement:
        """
        The only parameters supported during prepare are dataframes.
        Any remaining parameters will be ignored and should be passed to execute().
        """  # noqa: D401
        loop = asyncio.get_running_loop()
        conn, conn_index = self.__get_connection_with_least_queries()

        try:
            prepared_statement = await loop.run_in_executor(self.executor, conn.prepare, query, parameters)
            return prepared_statement
        finally:
            self.__decrement_connection_counter(conn_index)

    async def prepare(self, query: str, parameters: dict[str, Any] | None = None) -> PreparedStatement:
        """
        Create a prepared statement for a query asynchronously.

        Parameters
        ----------
        query : str
            Query to prepare.
        parameters : dict[str, Any]
            Parameters for the query.

        Returns
        -------
        PreparedStatement
            Prepared statement.

        """
        warnings.warn(
            "The use of separate prepare + execute of queries is deprecated. "
            "Please use a single call to the execute() API instead.",
            DeprecationWarning,
            stacklevel=2,
        )
        return await self._prepare(query, parameters)

    def close(self) -> None:
        """
        Close all connections and shut down the thread pool.

        Note: Calling this method is optional. The connections and thread pool
        will be closed automatically when the instance is garbage collected.
        """
        for conn in self.connections:
            conn.close()

        self.executor.shutdown(wait=True)
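`AsyncConnection.execute` dispatches each query to one of the pooled `Connection` objects on a `ThreadPoolExecutor` via `run_in_executor`, so callers can await it from an event loop. A minimal usage sketch, assuming a database directory `./test` and the `User` table from the `__init__.py` example above:

```python
# Minimal sketch of the AsyncConnection API shown above; the database path,
# schema, and query are illustrative assumptions, not part of the package.
import asyncio

import real_ladybug as lb


async def main() -> None:
    db = lb.Database("./test")
    # Four pooled connections; 0 threads per query means no limit.
    aconn = lb.AsyncConnection(db, max_concurrent_queries=4, max_threads_per_query=0)
    try:
        result = await aconn.execute("MATCH (u:User) RETURN u.name, u.age;")
        while result.has_next():
            print(result.get_next())
    finally:
        aconn.close()


asyncio.run(main())
```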
real_ladybug/connection.py
ADDED
@@ -0,0 +1,323 @@
from __future__ import annotations

import warnings
from typing import TYPE_CHECKING, Any, Callable

from . import _lbug
from .prepared_statement import PreparedStatement
from .query_result import QueryResult

if TYPE_CHECKING:
    import sys
    from types import TracebackType

    from .database import Database
    from .types import Type

    if sys.version_info >= (3, 11):
        from typing import Self
    else:
        from typing_extensions import Self


class Connection:
    """Connection to a database."""

    def __init__(self, database: Database, num_threads: int = 0):
        """
        Initialise lbug database connection.

        Parameters
        ----------
        database : Database
            Database to connect to.

        num_threads : int
            Maximum number of threads to use for executing queries.

        """
        self._connection: Any = None  # (type: _lbug.Connection from pybind11)
        self.database = database
        self.num_threads = num_threads
        self.is_closed = False
        self.init_connection()

    def __getstate__(self) -> dict[str, Any]:
        state = {
            "database": self.database,
            "num_threads": self.num_threads,
            "_connection": None,
        }
        return state

    def init_connection(self) -> None:
        """Establish a connection to the database, if not already initialised."""
        if self.is_closed:
            error_msg = "Connection is closed."
            raise RuntimeError(error_msg)
        self.database.init_database()
        if self._connection is None:
            self._connection = _lbug.Connection(self.database._database, self.num_threads)  # type: ignore[union-attr]

    def set_max_threads_for_exec(self, num_threads: int) -> None:
        """
        Set the maximum number of threads for executing queries.

        Parameters
        ----------
        num_threads : int
            Maximum number of threads to use for executing queries.

        """
        self.init_connection()
        self._connection.set_max_threads_for_exec(num_threads)

    def close(self) -> None:
        """
        Close the connection.

        Note: Calling this method is optional. The connection will be closed
        automatically when the object goes out of scope.
        """
        if self._connection is not None:
            self._connection.close()
        self._connection = None
        self.is_closed = True

    def __enter__(self) -> Self:
        return self

    def __exit__(
        self,
        exc_type: type[BaseException] | None,
        exc_value: BaseException | None,
        exc_traceback: TracebackType | None,
    ) -> None:
        self.close()

    def execute(
        self,
        query: str | PreparedStatement,
        parameters: dict[str, Any] | None = None,
    ) -> QueryResult | list[QueryResult]:
        """
        Execute a query.

        Parameters
        ----------
        query : str | PreparedStatement
            A prepared statement or a query string.
            If a query string is given, a prepared statement will be created
            automatically.

        parameters : dict[str, Any]
            Parameters for the query.

        Returns
        -------
        QueryResult
            Query result.

        """
        if parameters is None:
            parameters = {}

        self.init_connection()
        if not isinstance(parameters, dict):
            msg = f"Parameters must be a dict; found {type(parameters)}."
            raise RuntimeError(msg)  # noqa: TRY004

        if len(parameters) == 0 and isinstance(query, str):
            query_result_internal = self._connection.query(query)
        else:
            prepared_statement = self._prepare(query, parameters) if isinstance(query, str) else query
            query_result_internal = self._connection.execute(prepared_statement._prepared_statement, parameters)
        if not query_result_internal.isSuccess():
            raise RuntimeError(query_result_internal.getErrorMessage())
        current_query_result = QueryResult(self, query_result_internal)
        if not query_result_internal.hasNextQueryResult():
            return current_query_result
        all_query_results = [current_query_result]
        while query_result_internal.hasNextQueryResult():
            query_result_internal = query_result_internal.getNextQueryResult()
            if not query_result_internal.isSuccess():
                raise RuntimeError(query_result_internal.getErrorMessage())
            all_query_results.append(QueryResult(self, query_result_internal))
        return all_query_results

    def _prepare(
        self,
        query: str,
        parameters: dict[str, Any] | None = None,
    ) -> PreparedStatement:
        """
        The only parameters supported during prepare are dataframes.
        Any remaining parameters will be ignored and should be passed to execute().
        """  # noqa: D401
        return PreparedStatement(self, query, parameters)

    def prepare(
        self,
        query: str,
        parameters: dict[str, Any] | None = None,
    ) -> PreparedStatement:
        """
        Create a prepared statement for a query.

        Parameters
        ----------
        query : str
            Query to prepare.

        parameters : dict[str, Any]
            Parameters for the query.

        Returns
        -------
        PreparedStatement
            Prepared statement.

        """
        warnings.warn(
            "The use of separate prepare + execute of queries is deprecated. "
            "Please use a single call to the execute() API instead.",
            DeprecationWarning,
            stacklevel=2,
        )
        return self._prepare(query, parameters)

    def _get_node_property_names(self, table_name: str) -> dict[str, Any]:
        LIST_START_SYMBOL = "["
        LIST_END_SYMBOL = "]"
        self.init_connection()
        query_result = self.execute(f"CALL table_info('{table_name}') RETURN *;")
        results = {}
        while query_result.has_next():
            row = query_result.get_next()
            prop_name = row[1]
            prop_type = row[2]
            is_primary_key = row[4] is True
            dimension = prop_type.count(LIST_START_SYMBOL)
            splitted = prop_type.split(LIST_START_SYMBOL)
            shape = []
            for s in splitted:
                if LIST_END_SYMBOL not in s:
                    continue
                s = s.split(LIST_END_SYMBOL)[0]
                if s != "":
                    shape.append(int(s))
            prop_type = splitted[0]
            results[prop_name] = {
                "type": prop_type,
                "dimension": dimension,
                "is_primary_key": is_primary_key,
            }
            if len(shape) > 0:
                results[prop_name]["shape"] = tuple(shape)
        return results

    def _get_node_table_names(self) -> list[Any]:
        results = []
        self.init_connection()
        query_result = self.execute("CALL show_tables() RETURN *;")
        while query_result.has_next():
            row = query_result.get_next()
            if row[2] == "NODE":
                results.append(row[1])
        return results

    def _get_rel_table_names(self) -> list[dict[str, Any]]:
        results = []
        self.init_connection()
        tables_result = self.execute("CALL show_tables() RETURN *;")
        while tables_result.has_next():
            row = tables_result.get_next()
            if row[2] == "REL":
                name = row[1]
                connections_result = self.execute(f"CALL show_connection({name!r}) RETURN *;")
                src_dst_row = connections_result.get_next()
                src_node = src_dst_row[0]
                dst_node = src_dst_row[1]
                results.append({"name": name, "src": src_node, "dst": dst_node})
        return results

    def set_query_timeout(self, timeout_in_ms: int) -> None:
        """
        Set the query timeout value in ms for executing queries.

        Parameters
        ----------
        timeout_in_ms : int
            Query timeout value in ms for executing queries.

        """
        self.init_connection()
        self._connection.set_query_timeout(timeout_in_ms)

    def interrupt(self) -> None:
        """
        Interrupt execution of the current query.

        If there is no currently executing query, this function does nothing.
        """
        self._connection.interrupt()

    def create_function(
        self,
        name: str,
        udf: Callable[[...], Any],
        params_type: list[Type | str] | None = None,
        return_type: Type | str = "",
        *,
        default_null_handling: bool = True,
        catch_exceptions: bool = False,
    ) -> None:
        """
        Set a User Defined Function (UDF) for use in Cypher queries.

        Parameters
        ----------
        name: str
            Name of the function.

        udf: Callable[[...], Any]
            Function to be executed.

        params_type: Optional[list[Type]]
            List of Type enums to describe the input parameters.

        return_type: Optional[Type]
            A Type enum to describe the returned value.

        default_null_handling: Optional[bool]
            If true, when any parameter is null, the resulting value will be null.

        catch_exceptions: Optional[bool]
            If true, when an exception is thrown from Python, the function output will be null.
            Otherwise, the exception will be rethrown.
        """
        if params_type is None:
            params_type = []
        parsed_params_type = [x if type(x) is str else x.value for x in params_type]
        if type(return_type) is not str:
            return_type = return_type.value

        self._connection.create_function(
            name=name,
            udf=udf,
            params_type=parsed_params_type,
            return_value=return_type,
            default_null=default_null_handling,
            catch_exceptions=catch_exceptions,
        )

    def remove_function(self, name: str) -> None:
        """
        Remove a User Defined Function (UDF).

        Parameters
        ----------
        name: str
            Name of the function to be removed.
        """
        self._connection.remove_function(name)
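`create_function` registers a Python callable as a UDF: entries in `params_type` may be `Type` enum members or plain type-name strings (enum members are converted via `.value`), and the same applies to `return_type`. A minimal sketch under the assumption that `"INT64"` is a valid type name (it appears in the schema example in `__init__.py`); the function name `times_two`, the table, and the query are illustrative, not part of the package:

```python
# Minimal sketch of registering and removing a UDF with the Connection API above.
import real_ladybug as lb


def times_two(x: int) -> int:
    return 2 * x


db = lb.Database("./test")
conn = lb.Connection(db)

# Register the UDF: one INT64 parameter, INT64 return value.
conn.create_function("times_two", times_two, ["INT64"], "INT64")

# Use it in a Cypher query (assumes the User table from the earlier example).
result = conn.execute("MATCH (u:User) RETURN u.name, times_two(u.age);")
while result.has_next():
    print(result.get_next())

# Unregister when no longer needed.
conn.remove_function("times_two")
```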