real-ladybug 0.0.1.dev1__cp311-cp311-win_amd64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of real-ladybug might be problematic. Click here for more details.
- real_ladybug/__init__.py +83 -0
- real_ladybug/_lbug.cp311-win_amd64.pyd +0 -0
- real_ladybug/_lbug.exp +0 -0
- real_ladybug/_lbug.lib +0 -0
- real_ladybug/async_connection.py +226 -0
- real_ladybug/connection.py +323 -0
- real_ladybug/constants.py +7 -0
- real_ladybug/database.py +307 -0
- real_ladybug/prepared_statement.py +51 -0
- real_ladybug/py.typed +0 -0
- real_ladybug/query_result.py +511 -0
- real_ladybug/torch_geometric_feature_store.py +185 -0
- real_ladybug/torch_geometric_graph_store.py +131 -0
- real_ladybug/torch_geometric_result_converter.py +282 -0
- real_ladybug/types.py +39 -0
- real_ladybug-0.0.1.dev1.dist-info/METADATA +88 -0
- real_ladybug-0.0.1.dev1.dist-info/RECORD +114 -0
- real_ladybug-0.0.1.dev1.dist-info/WHEEL +5 -0
- real_ladybug-0.0.1.dev1.dist-info/licenses/LICENSE +21 -0
- real_ladybug-0.0.1.dev1.dist-info/top_level.txt +3 -0
- real_ladybug-0.0.1.dev1.dist-info/zip-safe +1 -0
- real_ladybug-source/scripts/antlr4/hash.py +2 -0
- real_ladybug-source/scripts/antlr4/keywordhandler.py +47 -0
- real_ladybug-source/scripts/collect-extensions.py +68 -0
- real_ladybug-source/scripts/collect-single-file-header.py +126 -0
- real_ladybug-source/scripts/export-dbs.py +101 -0
- real_ladybug-source/scripts/export-import-test.py +345 -0
- real_ladybug-source/scripts/extension/purge-beta.py +34 -0
- real_ladybug-source/scripts/generate-cpp-docs/collect_files.py +122 -0
- real_ladybug-source/scripts/generate-tinysnb.py +34 -0
- real_ladybug-source/scripts/get-clangd-diagnostics.py +233 -0
- real_ladybug-source/scripts/migrate-lbug-db.py +308 -0
- real_ladybug-source/scripts/multiplatform-test-helper/collect-results.py +71 -0
- real_ladybug-source/scripts/multiplatform-test-helper/notify-discord.py +68 -0
- real_ladybug-source/scripts/pip-package/package_tar.py +90 -0
- real_ladybug-source/scripts/pip-package/setup.py +130 -0
- real_ladybug-source/scripts/run-clang-format.py +408 -0
- real_ladybug-source/scripts/setup-extension-repo.py +67 -0
- real_ladybug-source/scripts/test-simsimd-dispatch.py +45 -0
- real_ladybug-source/scripts/update-nightly-build-version.py +81 -0
- real_ladybug-source/third_party/brotli/scripts/dictionary/step-01-download-rfc.py +16 -0
- real_ladybug-source/third_party/brotli/scripts/dictionary/step-02-rfc-to-bin.py +34 -0
- real_ladybug-source/third_party/brotli/scripts/dictionary/step-03-validate-bin.py +35 -0
- real_ladybug-source/third_party/brotli/scripts/dictionary/step-04-generate-java-literals.py +85 -0
- real_ladybug-source/third_party/pybind11/tools/codespell_ignore_lines_from_errors.py +35 -0
- real_ladybug-source/third_party/pybind11/tools/libsize.py +36 -0
- real_ladybug-source/third_party/pybind11/tools/make_changelog.py +63 -0
- real_ladybug-source/tools/python_api/build/real_ladybug/__init__.py +83 -0
- real_ladybug-source/tools/python_api/build/real_ladybug/async_connection.py +226 -0
- real_ladybug-source/tools/python_api/build/real_ladybug/connection.py +323 -0
- real_ladybug-source/tools/python_api/build/real_ladybug/constants.py +7 -0
- real_ladybug-source/tools/python_api/build/real_ladybug/database.py +307 -0
- real_ladybug-source/tools/python_api/build/real_ladybug/prepared_statement.py +51 -0
- real_ladybug-source/tools/python_api/build/real_ladybug/py.typed +0 -0
- real_ladybug-source/tools/python_api/build/real_ladybug/query_result.py +511 -0
- real_ladybug-source/tools/python_api/build/real_ladybug/torch_geometric_feature_store.py +185 -0
- real_ladybug-source/tools/python_api/build/real_ladybug/torch_geometric_graph_store.py +131 -0
- real_ladybug-source/tools/python_api/build/real_ladybug/torch_geometric_result_converter.py +282 -0
- real_ladybug-source/tools/python_api/build/real_ladybug/types.py +39 -0
- real_ladybug-source/tools/python_api/src_py/__init__.py +83 -0
- real_ladybug-source/tools/python_api/src_py/async_connection.py +226 -0
- real_ladybug-source/tools/python_api/src_py/connection.py +323 -0
- real_ladybug-source/tools/python_api/src_py/constants.py +7 -0
- real_ladybug-source/tools/python_api/src_py/database.py +307 -0
- real_ladybug-source/tools/python_api/src_py/prepared_statement.py +51 -0
- real_ladybug-source/tools/python_api/src_py/py.typed +0 -0
- real_ladybug-source/tools/python_api/src_py/query_result.py +511 -0
- real_ladybug-source/tools/python_api/src_py/torch_geometric_feature_store.py +185 -0
- real_ladybug-source/tools/python_api/src_py/torch_geometric_graph_store.py +131 -0
- real_ladybug-source/tools/python_api/src_py/torch_geometric_result_converter.py +282 -0
- real_ladybug-source/tools/python_api/src_py/types.py +39 -0
- real_ladybug-source/tools/python_api/test/conftest.py +230 -0
- real_ladybug-source/tools/python_api/test/disabled_test_extension.py +73 -0
- real_ladybug-source/tools/python_api/test/ground_truth.py +430 -0
- real_ladybug-source/tools/python_api/test/test_arrow.py +694 -0
- real_ladybug-source/tools/python_api/test/test_async_connection.py +159 -0
- real_ladybug-source/tools/python_api/test/test_blob_parameter.py +145 -0
- real_ladybug-source/tools/python_api/test/test_connection.py +49 -0
- real_ladybug-source/tools/python_api/test/test_database.py +234 -0
- real_ladybug-source/tools/python_api/test/test_datatype.py +372 -0
- real_ladybug-source/tools/python_api/test/test_df.py +564 -0
- real_ladybug-source/tools/python_api/test/test_dict.py +112 -0
- real_ladybug-source/tools/python_api/test/test_exception.py +54 -0
- real_ladybug-source/tools/python_api/test/test_fsm.py +227 -0
- real_ladybug-source/tools/python_api/test/test_get_header.py +49 -0
- real_ladybug-source/tools/python_api/test/test_helper.py +8 -0
- real_ladybug-source/tools/python_api/test/test_issue.py +147 -0
- real_ladybug-source/tools/python_api/test/test_iteration.py +96 -0
- real_ladybug-source/tools/python_api/test/test_networkx.py +437 -0
- real_ladybug-source/tools/python_api/test/test_parameter.py +340 -0
- real_ladybug-source/tools/python_api/test/test_prepared_statement.py +117 -0
- real_ladybug-source/tools/python_api/test/test_query_result.py +54 -0
- real_ladybug-source/tools/python_api/test/test_query_result_close.py +44 -0
- real_ladybug-source/tools/python_api/test/test_scan_pandas.py +676 -0
- real_ladybug-source/tools/python_api/test/test_scan_pandas_pyarrow.py +714 -0
- real_ladybug-source/tools/python_api/test/test_scan_polars.py +165 -0
- real_ladybug-source/tools/python_api/test/test_scan_pyarrow.py +167 -0
- real_ladybug-source/tools/python_api/test/test_timeout.py +11 -0
- real_ladybug-source/tools/python_api/test/test_torch_geometric.py +640 -0
- real_ladybug-source/tools/python_api/test/test_torch_geometric_remote_backend.py +111 -0
- real_ladybug-source/tools/python_api/test/test_udf.py +207 -0
- real_ladybug-source/tools/python_api/test/test_version.py +6 -0
- real_ladybug-source/tools/python_api/test/test_wal.py +80 -0
- real_ladybug-source/tools/python_api/test/type_aliases.py +10 -0
- real_ladybug-source/tools/rust_api/update_version.py +47 -0
- real_ladybug-source/tools/shell/test/conftest.py +218 -0
- real_ladybug-source/tools/shell/test/test_helper.py +60 -0
- real_ladybug-source/tools/shell/test/test_shell_basics.py +325 -0
- real_ladybug-source/tools/shell/test/test_shell_commands.py +656 -0
- real_ladybug-source/tools/shell/test/test_shell_control_edit.py +438 -0
- real_ladybug-source/tools/shell/test/test_shell_control_search.py +468 -0
- real_ladybug-source/tools/shell/test/test_shell_esc_edit.py +232 -0
- real_ladybug-source/tools/shell/test/test_shell_esc_search.py +162 -0
- real_ladybug-source/tools/shell/test/test_shell_flags.py +645 -0
|
@@ -0,0 +1,233 @@
|
|
|
1
|
+
#!/usr/bin/env python3
|
|
2
|
+
import argparse
|
|
3
|
+
import io
|
|
4
|
+
import json
|
|
5
|
+
import multiprocessing
|
|
6
|
+
import os
|
|
7
|
+
import subprocess
|
|
8
|
+
import sys
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
class LSPClient:
    """Minimal JSON-RPC client driving a `clangd` language server over stdio.

    Implements just enough of the Language Server Protocol to initialize a
    workspace, open files, and collect the `textDocument/publishDiagnostics`
    notifications clangd emits for them.
    """

    def __init__(self, *, compile_commands_dir=None, jobs=None, verbose=False):
        """Spawn a clangd subprocess.

        :param compile_commands_dir: Directory containing compile_commands.json.
        :param jobs: Worker threads for this clangd instance; defaults to the
            CPU count.
        :param verbose: If True, let clangd inherit our stderr; otherwise
            suppress its log entirely.
        """
        # BUG FIX: resolve the jobs default *before* building the argument
        # list; previously str(None) -> "None" was passed to `clangd -j`.
        if jobs is None:
            jobs = multiprocessing.cpu_count()
        args = ["clangd", "-j", str(jobs)]

        if compile_commands_dir is not None:
            args += ["--compile-commands-dir", compile_commands_dir]

        self.id = 0
        self.child = subprocess.Popen(
            args,
            stdin=subprocess.PIPE,
            stdout=subprocess.PIPE,
            # Let clangd inherit our stderr, or suppress it entirely.
            stderr=None if verbose else subprocess.DEVNULL,
        )
        # LSP frames use \r\n-terminated headers.
        self.stdin = io.TextIOWrapper(self.child.stdin, newline="\r\n")
        self.stdout = io.TextIOWrapper(self.child.stdout, newline="\r\n")

    def request(self, method, params):
        """Send a request and block until its response's `result` arrives."""
        self.send_request(method, params)
        return self.recv_response()

    def send_request(self, method, params):
        """Send a JSON-RPC request with a fresh id (response is read later)."""
        self.id += 1
        self.send_json(
            dict(
                id=self.id,
                jsonrpc="2.0",
                method=method,
                params=params,
            )
        )

    def send_json(self, json_data):
        """Serialize *json_data* and write it framed by a Content-Length header."""
        data = json.dumps(json_data)
        bindata = data.encode("utf-8")
        # NOTE(review): Content-Length counts UTF-8 bytes while the text
        # stream writes characters; the two only agree for ASCII payloads.
        header = f"Content-Length: {len(bindata)}\r\n\r\n"

        self.stdin.write(header + data)
        self.stdin.flush()

    def recv_response(self):
        """Read the next message, asserting it answers the last request sent."""
        json_data = self.recv_json()
        assert json_data["id"] == self.id
        assert "error" not in json_data
        return json_data["result"]

    def send_notif(self, method, params):
        """Send a JSON-RPC notification (no id, no response expected)."""
        self.send_json(
            dict(
                jsonrpc="2.0",
                method=method,
                params=params,
            )
        )

    def expect_notif(self, method):
        """Read the next message, asserting it is a *method* notification."""
        json_data = self.recv_json()
        assert json_data["method"] == method
        assert "error" not in json_data
        return json_data["params"]

    def recv_json(self):
        """Read one Content-Length-framed JSON message from clangd."""
        header = self.stdout.readline()
        content_len_header = "Content-Length: "
        assert header.startswith(content_len_header)
        assert header.endswith("\r\n")
        data_len = int(header[len(content_len_header) : -2])

        # Expect end of header
        assert self.stdout.read(2) == "\r\n"

        data = self.stdout.read(data_len)
        return json.loads(data)

    def initialize(self, project):
        """Perform the LSP `initialize` handshake rooted at *project*."""
        return self.request(
            "initialize",
            dict(
                processId=os.getpid(),
                rootUri="file://" + project,
                capabilities={},
            ),
        )

    def open_file(self, file):
        """Tell clangd that *file* is open, sending its current content."""
        with open(file) as f:
            file_content = f.read()

        self.send_notif(
            "textDocument/didOpen",
            dict(
                textDocument=dict(
                    uri="file://" + os.path.realpath(file),
                    languageId="cpp",
                    version=1,
                    text=file_content,
                )
            ),
        )

    def show_diagnostics(self):
        """Print every diagnostic in the next publishDiagnostics notification.

        :return: True if the file had at least one diagnostic.
        """
        diagnostics_response = self.expect_notif("textDocument/publishDiagnostics")
        uri = diagnostics_response["uri"]
        file_prefix = "file://"
        assert uri.startswith(file_prefix)
        file = uri[len(file_prefix) :]
        diagnostics = diagnostics_response["diagnostics"]
        for diagnostic in diagnostics:
            LSPClient.show_diagnostic(file, diagnostic)

        return len(diagnostics) != 0

    @staticmethod
    def show_diagnostic(file, diagnostic):
        """Print one diagnostic as `file:line:message`.

        Marked @staticmethod (the decorator was missing; the method takes no
        self and is only ever called on the class).
        """
        range = diagnostic["range"]
        start = range["start"]
        line = start["line"]
        message = diagnostic["message"]
        print(f"{file}:{line}:{message}")

    def send_shutdown(self):
        """Send the LSP `shutdown` request; the response is read later."""
        self.send_request("shutdown", None)

    def recv_shutdown_send_exit(self):
        """Read the pending `shutdown` response, then send the `exit` notification.

        BUG FIX: the shutdown response was never consumed even though the
        method name promises it; the LSP lifecycle expects the client to
        receive the shutdown reply before sending `exit`.
        """
        self.recv_response()
        self.send_notif("exit", None)

    def wait(self):
        """Wait for the clangd subprocess to terminate cleanly."""
        assert self.child.wait() == 0
141
|
+
|
|
142
|
+
|
|
143
|
+
def shutdown_all(clients):
    """Tear down every LSP client, one protocol phase at a time.

    Issuing each phase to all clients before starting the next lets the
    servers wind down concurrently, which drastically speeds up cleanup.
    """
    for phase in ("send_shutdown", "recv_shutdown_send_exit", "wait"):
        for lsp in clients:
            getattr(lsp, phase)()
153
|
+
|
|
154
|
+
|
|
155
|
+
def get_clients(client_count, compile_commands_dir, total_jobs, verbose):
    """Spawn *client_count* LSPClient instances sharing *total_jobs* workers.

    Jobs are spread as evenly as possible; clients at the front receive one
    extra job when the division is not exact (they may also receive more
    files later, since files are round-robined the same way).
    """
    base_jobs, extra = divmod(total_jobs, client_count)
    # Equivalent to handing jobs out round-robin: the first `extra` clients
    # get base_jobs + 1, the rest get base_jobs.
    per_client = [base_jobs + (1 if idx < extra else 0) for idx in range(client_count)]
    return [
        LSPClient(compile_commands_dir=compile_commands_dir, jobs=n, verbose=verbose)
        for n in per_client
    ]
162
|
+
|
|
163
|
+
|
|
164
|
+
def get_diagnostics(files, *, client_count, compile_commands_dir, total_jobs, verbose):
    """Run clangd over *files* using a pool of parallel LSP clients.

    Prints every diagnostic found and returns a truthy value iff at least
    one file produced a diagnostic (suitable as a process exit status).
    """
    if client_count > total_jobs:
        print(f"Client count {client_count} is greater than total jobs {total_jobs}. Forcing the client count to {total_jobs}.", file=sys.stderr)
        client_count = total_jobs

    workspace = os.getcwd()

    # No point spawning more servers than there are files to scan.
    client_count = min(client_count, len(files))
    clients = get_clients(client_count, compile_commands_dir, total_jobs, verbose)
    for lsp in clients:
        lsp.initialize(workspace)

    # Round-robin the files across clients, remembering how many each got
    # so we know how many diagnostics notifications to expect from each.
    opened = [0] * client_count
    for idx, path in enumerate(files):
        slot = idx % client_count
        clients[slot].open_file(path)
        opened[slot] += 1

    any_diagnostic = 0
    for count, lsp in zip(opened, clients):
        for _ in range(count):
            any_diagnostic |= lsp.show_diagnostics()

    shutdown_all(clients)
    return any_diagnostic
190
|
+
|
|
191
|
+
|
|
192
|
+
def main():
    """Parse CLI arguments and scan the given files with clangd.

    :return: Truthy when any diagnostic was reported (used as the exit code).
    """
    parser = argparse.ArgumentParser(
        prog="get-clangd-diagnostics.py",
        description="Scan project for any clangd diagnostics (including warnings) and outputs them.",
    )
    parser.add_argument("files", nargs="+", help="Files to scan")
    parser.add_argument(
        "--instances",
        type=int,
        default=4,
        help="Number of clangd instances to spawn in parallel. Defaults to 4.",
    )
    parser.add_argument(
        "-j",
        "--jobs",
        type=int,
        help="Number of total jobs across all servers. Defaults to the CPU count.",
    )
    parser.add_argument(
        "-p",
        "--compile-commands-dir",
        # typo fix: was "containining"
        help="Directory containing compile_commands.json",
    )
    parser.add_argument(
        "-v",
        "--verbose",
        action="store_true",
        help="Show clangd debug output",
    )

    args = parser.parse_args(sys.argv[1:])
    jobs = args.jobs if args.jobs is not None else multiprocessing.cpu_count()
    return get_diagnostics(
        args.files,
        client_count=args.instances,
        compile_commands_dir=args.compile_commands_dir,
        total_jobs=jobs,
        verbose=args.verbose,
    )


# Guard added for consistency with the package's other scripts, so that
# importing this module does not immediately run a scan.
if __name__ == "__main__":
    sys.exit(main())
|
@@ -0,0 +1,308 @@
|
|
|
1
|
+
#!/usr/bin/env python3
|
|
2
|
+
"""
|
|
3
|
+
Lbug Database Migration Script
|
|
4
|
+
|
|
5
|
+
This script helps migrate Lbug databases between versions.
|
|
6
|
+
- Sets up isolated Python environments for each Lbug version
|
|
7
|
+
- Exports data from the source database using the old version
|
|
8
|
+
- Imports data into the target database using the new version
|
|
9
|
+
- If `overwrite` is enabled, the target database will replace the source database and the source database will be backed up with an `_old` suffix
|
|
10
|
+
- If `delete-old` is enabled, the source database will be deleted
|
|
11
|
+
|
|
12
|
+
Usage Examples:
|
|
13
|
+
# Basic migration from 0.9.0 to 0.11.0
|
|
14
|
+
python migrate-lbug-db.py --old-version 0.9.0 --new-version 0.11.0 --old-db /path/to/old/database --new-db /path/to/new/database
|
|
15
|
+
|
|
16
|
+
Notes:
|
|
17
|
+
- Can only be used to migrate to newer Lbug versions, from 0.11.0 onwards
|
|
18
|
+
"""
|
|
19
|
+
|
|
20
|
+
import tempfile
|
|
21
|
+
import sys
|
|
22
|
+
import struct
|
|
23
|
+
import shutil
|
|
24
|
+
import subprocess
|
|
25
|
+
import argparse
|
|
26
|
+
import os
|
|
27
|
+
|
|
28
|
+
# File-name suffixes that together make up a file-based Lbug database on
# disk: the main database file plus its ".wal" and ".shadow" companions.
LBUG_FILE_EXTENSIONS = ["", ".wal", ".shadow"]


# FIXME: Replace this with a Lbug query to get the mapping when available.
# Maps the storage-version code read from a database's catalog file (see
# read_lbug_storage_version) to the Lbug release string that wrote it.
lbug_version_mapping = {
    34: "0.7.0",
    35: "0.7.1",
    36: "0.8.2",
    37: "0.9.0",
    38: "0.10.1",
    39: "0.11.0",
}

# Earliest Lbug release this script supports migrating *to*.
minimum_lbug_migration_version = "0.11.0"
|
43
|
+
|
|
44
|
+
|
|
45
|
+
def lbug_version_comparison(version: str, target: str) -> bool:
    """Return True when *version* is greater than or equal to *target*.

    Versions are compared as tuples of dot-separated components. Any
    non-numeric component (e.g. "dev1" in "0.11.0.dev1") is treated as 0,
    so "0.11.0.dev1" compares as "0.11.0.0".
    """

    def as_tuple(raw: str) -> tuple:
        # Non-digit parts become 0 so dev/pre-release tags don't break int().
        return tuple(int(part) if part.isdigit() else 0 for part in raw.split("."))

    return as_tuple(version) >= as_tuple(target)
53
|
+
|
|
54
|
+
|
|
55
|
+
def read_lbug_storage_version(lbug_db_path: str) -> str:
    """
    Read a database's storage-version code and map it to a Lbug release.

    The code lives 4 bytes into the catalog file (catalog.kz inside a
    directory database, or the database file itself) as a little-endian
    unsigned 64-bit integer.

    :param lbug_db_path: Path to the Lbug database file/directory.
    :return: Lbug version string, e.g. "0.11.0".  (The annotation previously
        said ``int``, but the function has always returned the mapped string.)
    :raises FileNotFoundError: If a directory database has no catalog.kz.
    :raises ValueError: If the file is too short or the code is unknown.
    """
    if os.path.isdir(lbug_db_path):
        lbug_version_file_path = os.path.join(lbug_db_path, "catalog.kz")
        if not os.path.isfile(lbug_version_file_path):
            raise FileNotFoundError("Lbug catalog.kz file does not exist")
    else:
        lbug_version_file_path = lbug_db_path

    with open(lbug_version_file_path, "rb") as f:
        # Skip the leading 4 bytes, then read the next 8 bytes as a
        # little-endian unsigned 64-bit integer.
        f.seek(4)
        data = f.read(8)
        if len(data) < 8:
            raise ValueError(
                f"File '{lbug_version_file_path}' does not contain a storage version code."
            )
        version_code = struct.unpack("<Q", data)[0]

    if version_code in lbug_version_mapping:
        return lbug_version_mapping[version_code]
    else:
        raise ValueError(f"Could not map version_code {version_code} to proper Lbug version.")
|
83
|
+
|
|
84
|
+
|
|
85
|
+
def ensure_env(version: str, export_dir) -> str:
    """
    Create a fresh venv at `{export_dir}/.lbug_envs/{version}` and install
    `lbug=={version}` into it.

    :param version: Lbug release to install (e.g. "0.11.0").
    :param export_dir: Scratch directory (a TemporaryDirectory in practice).
    :return: Path to the venv's python executable.
    """
    # Use temp directory to create venv
    lbug_envs_dir = os.path.join(export_dir, ".lbug_envs")

    # venv base under the script directory
    base = os.path.join(lbug_envs_dir, version)
    # BUG FIX: venv places the interpreter under "Scripts" on Windows and
    # "bin" everywhere else; the hard-coded "bin" broke Windows runs.
    bin_dir = "Scripts" if os.name == "nt" else "bin"
    py_bin = os.path.join(base, bin_dir, "python")
    # If environment already exists clean it, for a reproducible install.
    if os.path.isdir(base):
        shutil.rmtree(base)

    print(f"→ Setting up venv for Lbug {version}...", file=sys.stderr)
    # Create venv
    # NOTE: Running python in debug mode can cause issues with creating a virtual environment from that python instance
    subprocess.run([sys.executable, "-m", "venv", base], check=True)
    # Install the specific Lbug version
    subprocess.run([py_bin, "-m", "pip", "install", "--upgrade", "pip"], check=True)
    subprocess.run([py_bin, "-m", "pip", "install", f"lbug=={version}"], check=True)
    return py_bin
|
108
|
+
|
|
109
|
+
|
|
110
|
+
def run_migration_step(python_exe: str, db_path: str, cypher: str):
    """
    Use *python_exe* (a venv interpreter with `lbug` installed) to open the
    Lbug database at *db_path* and execute the *cypher* statement.

    Exits the whole process with the child's return code if the query fails.
    """
    # ROBUSTNESS FIX: pass the path and the query through argv instead of
    # f-string-interpolating them into the code snippet; the old approach
    # broke on quotes/backslashes in either value.
    snippet = (
        "import sys\n"
        "import lbug\n"
        "db = lbug.Database(sys.argv[1])\n"
        "conn = lbug.Connection(db)\n"
        "conn.execute(sys.argv[2])\n"
    )
    proc = subprocess.run(
        [python_exe, "-c", snippet, db_path, cypher],
        capture_output=True,
        text=True,
    )
    if proc.returncode != 0:
        print(f"Error: query failed:\n{cypher}\n{proc.stderr}", file=sys.stderr)
        sys.exit(proc.returncode)
|
124
|
+
|
|
125
|
+
|
|
126
|
+
def lbug_migration(
    new_db, old_db, new_version, old_version=None, overwrite=None, delete_old=None
):
    """
    Main migration function that handles the complete migration process.

    Exports *old_db* with a venv running *old_version* of lbug, imports the
    dump into *new_db* with *new_version*, and optionally (overwrite /
    delete_old) swaps the new database into the old one's place.

    :raises ValueError: If the paths are equal or *new_version* is too old.
    :raises FileNotFoundError: If *old_db* does not exist.
    :raises FileExistsError: If *new_db* already exists.
    """
    if new_db == old_db:
        raise ValueError(
            "The new database path cannot be the same as the old database path. Please provide a different path for the new database."
        )

    if not lbug_version_comparison(
        version=new_version, target=minimum_lbug_migration_version
    ):
        raise ValueError(
            f"New version for lbug is not supported, has to be equal or higher than version: {minimum_lbug_migration_version}"
        )

    # BUG FIX (ordering): check that the source exists *before* trying to
    # read its storage version, and detect the version before logging it —
    # previously the log line could print "from None to ..." and a missing
    # path surfaced as a confusing read error.
    if not os.path.exists(old_db):
        raise FileNotFoundError(f"Source database '{old_db}' does not exist.")

    # If version of old lbug db is not provided try to determine it based on file info
    if not old_version:
        old_version = read_lbug_storage_version(old_db)

    print(
        f"Migrating Lbug database from {old_version} to {new_version}", file=sys.stderr
    )
    print(f"Source: {old_db}", file=sys.stderr)
    print("", file=sys.stderr)

    # Prepare target - ensure parent directory exists but refuse to clobber
    # an existing target.
    parent_dir = os.path.dirname(new_db)
    if parent_dir:
        os.makedirs(parent_dir, exist_ok=True)

    if os.path.exists(new_db):
        raise FileExistsError(
            f"File already exists at {new_db}, remove file or change new database file path to continue"
        )

    # Use temp directory for all processing, it will be cleaned up after with statement
    with tempfile.TemporaryDirectory() as export_dir:
        # Set up one isolated environment per lbug version.
        print(f"Setting up Lbug {old_version} environment...", file=sys.stderr)
        old_py = ensure_env(old_version, export_dir)
        print(f"Setting up Lbug {new_version} environment...", file=sys.stderr)
        new_py = ensure_env(new_version, export_dir)

        export_file = os.path.join(export_dir, "lbug_export")
        print(f"Exporting old DB → {export_dir}", file=sys.stderr)
        run_migration_step(old_py, old_db, f"EXPORT DATABASE '{export_file}'")
        print("Export complete.", file=sys.stderr)

        # Check if export files were created and have content
        schema_file = os.path.join(export_file, "schema.cypher")
        if not os.path.exists(schema_file) or os.path.getsize(schema_file) == 0:
            raise ValueError(f"Schema file not found: {schema_file}")

        print(f"Importing into new DB at {new_db}", file=sys.stderr)
        run_migration_step(new_py, new_db, f"IMPORT DATABASE '{export_file}'")
        print("Import complete.", file=sys.stderr)

        # Rename new lbug database to old lbug database name if enabled
        if overwrite or delete_old:
            # Remove lbug lock from migrated DB
            lock_file = new_db + ".lock"
            if os.path.exists(lock_file):
                os.remove(lock_file)
            rename_databases(old_db, old_version, new_db, delete_old)

    print("Lbug graph database migration finished successfully!")
|
199
|
+
|
|
200
|
+
|
|
201
|
+
def rename_databases(old_db: str, old_version: str, new_db: str, delete_old: bool):
    """
    Swap the freshly migrated *new_db* into *old_db*'s place.

    When *delete_old* is False the original database (main file plus its
    .wal/.shadow companions, or the whole directory) is kept as a backup
    named ``{name}_old_{version}``; when True it is deleted instead.

    Exits the process with status 1 if the original path is missing.
    :raises OSError: If file operations fail.
    """
    base_dir = os.path.dirname(old_db)
    name = os.path.basename(old_db.rstrip(os.sep))
    # Add _old_ and version info to backup graph database, e.g. db_old_0_9_0.
    backup_database_name = f"{name}_old_" + old_version.replace(".", "_")
    backup_base = os.path.join(base_dir, backup_database_name)

    if os.path.isfile(old_db):
        # File-based database: handle main file and accompanying lock/WAL
        for ext in LBUG_FILE_EXTENSIONS:
            src = old_db + ext
            dst = backup_base + ext
            if os.path.exists(src):
                if delete_old:
                    os.remove(src)
                else:
                    os.rename(src, dst)
                    print(f"Renamed '{src}' to '{dst}'", file=sys.stderr)
    elif os.path.isdir(old_db):
        # Directory-based Lbug database
        backup_dir = backup_base
        if delete_old:
            shutil.rmtree(old_db)
        else:
            os.rename(old_db, backup_dir)
            print(f"Renamed directory '{old_db}' to '{backup_dir}'", file=sys.stderr)
    else:
        print(
            f"Original database path '{old_db}' not found for renaming.",
            file=sys.stderr,
        )
        sys.exit(1)

    # Now move new files into place.
    # CONSISTENCY FIX: reuse LBUG_FILE_EXTENSIONS instead of repeating the
    # ["", ".wal", ".shadow"] literal.
    for ext in LBUG_FILE_EXTENSIONS:
        src_new = new_db + ext
        dst_new = os.path.join(base_dir, name + ext)
        if os.path.exists(src_new):
            os.rename(src_new, dst_new)
            print(f"Renamed '{src_new}' to '{dst_new}'", file=sys.stderr)
|
250
|
+
|
|
251
|
+
|
|
252
|
+
def main():
    """CLI entry point: parse arguments and run the Lbug DB migration."""
    parser = argparse.ArgumentParser(
        description="Migrate Lbug DB via PyPI versions",
        epilog="""
Examples:
    %(prog)s --old-version 0.9.0 --new-version 0.11.0 \\
        --old-db /path/to/old/db --new-db /path/to/new/db --overwrite

Note: This script will create temporary virtual environments in .lbug_envs/ directory
to isolate different Lbug versions.
""",
        formatter_class=argparse.RawDescriptionHelpFormatter,
    )
    # Source version is optional: when omitted, the storage version is read
    # from the database file itself.
    parser.add_argument(
        "--old-version",
        required=False,
        default=None,
        help="Source Lbug version (e.g., 0.9.0). If not provided, automatic lbug version detection will be attempted.",
    )
    parser.add_argument(
        "--new-version", required=True, help="Target Lbug version (e.g., 0.11.0)"
    )
    parser.add_argument("--old-db", required=True, help="Path to source database directory")
    parser.add_argument(
        "--new-db",
        required=True,
        help="Path to target database directory, it can't be the same path as the old database. Use the overwrite flag if you want to replace the old database with the new one.",
    )
    parser.add_argument(
        "--overwrite",
        required=False,
        action="store_true",
        default=False,
        help="Rename new-db to the old-db name and location, and keeps old-db as backup if delete-old is not True",
    )
    parser.add_argument(
        "--delete-old",
        required=False,
        action="store_true",
        default=False,
        help="When overwrite and delete-old are True, old-db will not be stored as backup",
    )

    cli = parser.parse_args()

    lbug_migration(
        new_db=cli.new_db,
        old_db=cli.old_db,
        new_version=cli.new_version,
        old_version=cli.old_version,
        overwrite=cli.overwrite,
        delete_old=cli.delete_old,
    )


if __name__ == "__main__":
    main()
|
@@ -0,0 +1,71 @@
|
|
|
1
|
+
#!/usr/bin/env python3
|
|
2
|
+
|
|
3
|
+
import os
|
|
4
|
+
import sys
|
|
5
|
+
import pandas as pd
|
|
6
|
+
import json
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
def main():
    """Aggregate per-platform CI result CSVs into results.md / results.json.

    Usage: collect-results.py <results_dir>.  Every ``<platform>.csv`` found
    under the directory holds headerless ``stage,exit_code`` rows; the output
    is one status table with a row per stage and a column per platform.
    """
    if len(sys.argv) != 2:
        print("Usage: collect-results.py <results_dir>")
        sys.exit(1)
    if not os.path.isdir(sys.argv[1]):
        print(f"Error: {sys.argv[1]} is not a directory")
        sys.exit(1)

    results_dir = sys.argv[1]

    # platform -> DataFrame of (stage, exit_code) rows
    frames = {}
    for dirpath, _, filenames in os.walk(results_dir):
        for fname in filenames:
            if not fname.endswith(".csv"):
                continue
            # The platform name is the file name minus its ".csv" suffix.
            platform_name = os.path.splitext(fname)[0]
            table = pd.read_csv(os.path.join(dirpath, fname), header=None)
            table.columns = ["stage", "exit_code"]
            frames[platform_name] = table

    # platform -> {stage: exit_code}, with stages kept in first-seen order.
    exit_codes = {}
    stage_order = []
    for platform_name, table in frames.items():
        for stage, code in table.values:
            if stage not in stage_order:
                stage_order.append(stage)
            exit_codes.setdefault(platform_name, {})[stage] = int(code)

    # platform -> [{"stage": ..., "status": emoji}]; ❓ marks stages a
    # platform never reported.
    results_summary = {}
    for platform_name in frames:
        rows = []
        for stage in stage_order:
            if stage not in exit_codes[platform_name]:
                mark = "❓"
            elif exit_codes[platform_name][stage] == 0:
                mark = "✅"
            else:
                mark = "❌"
            rows.append({"stage": stage, "status": mark})
        results_summary[platform_name] = rows

    # Build the wide table: one row per stage, one column per platform.
    table_data = {"stage": stage_order}
    for platform_name, rows in results_summary.items():
        table_data[platform_name] = pd.DataFrame(rows)["status"]
    report = pd.DataFrame(table_data)
    report.index = report["stage"]
    del report["stage"]
    report.index.name = None

    with open("results.md", "w") as f:
        f.write(report.to_markdown())

    with open("results.json", "w") as f:
        json.dump(results_summary, f, indent=4)


if __name__ == "__main__":
    main()
|
@@ -0,0 +1,68 @@
|
|
|
1
|
+
# Post multiplatform CI results to a Discord channel.
#
# Usage: python send-discord-notification.py <result.json>
# Requires DISCORD_BOT_TOKEN and DISCORD_CHANNEL_ID in the environment;
# GITHUB_URL is optional and, when set, is appended as a link.
import discord
import os
import json
import sys

# Bot credentials and target channel; validated below before connecting.
TOKEN = os.getenv("DISCORD_BOT_TOKEN")
CHANNEL_ID = os.getenv("DISCORD_CHANNEL_ID")
GITHUB_URL = os.getenv("GITHUB_URL")

# Message chunks to send: filled in below, consumed by on_ready().
messages = []


if __name__ == "__main__":
    # Validate CLI usage and environment before touching Discord.
    if not len(sys.argv) == 2:
        print("Usage: python send-discord-notification.py <result.json>")
        sys.exit(1)
    if not os.path.isfile(sys.argv[1]):
        print(f"Error: {sys.argv[1]} is not a file")
        sys.exit(1)
    if not TOKEN:
        print("Error: DISCORD_BOT_TOKEN is not set")
        sys.exit(1)

    if not CHANNEL_ID:
        print("Error: DISCORD_CHANNEL_ID is not set")
        sys.exit(1)
    client = discord.Client(intents=discord.Intents.default())

    @client.event
    async def on_ready():
        # Once logged in, flush every prepared chunk to the channel, then
        # close the client so client.run() below returns.
        channel = client.get_channel(int(CHANNEL_ID))
        for message in messages:
            try:
                await channel.send(message)
            except Exception as e:
                print(f"Error: {e}")
                sys.exit(1)
        await client.close()

    message = "## Multiplatform test result:\n"
    with open(sys.argv[1], "r") as f:
        result = json.load(f)
    failures = {}
    stages = []
    # Collect the stage list (first-seen order) and every non-passing
    # result, grouped by platform.
    for platform in sorted(result.keys()):
        for r in result[platform]:
            if r['stage'] not in stages:
                stages.append(r['stage'])
            if r['status'] != "✅":
                failures.setdefault(platform, list()).append(r)
    message += f"- Platforms:\n - {', '.join(sorted(result.keys()))}\n"
    message += f"- Stages:\n - {', '.join(stages)}\n"
    # Add only the failures.
    if len(failures) > 0:
        message += "### Failures:\n"
        for platform in sorted(failures.keys()):
            # Start a new chunk once 1500 chars are reached — presumably to
            # stay under Discord's per-message length limit (TODO confirm).
            if len(message) >= 1500:
                messages.append(message)
                message = ""
            message += f"- **{platform}**:\n"
            for r in failures[platform]:
                message += f" - {r['stage']}: {r['status']}\n"
    if GITHUB_URL:
        message += "\n"
        message += f" [Github]({GITHUB_URL})"
    if message:
        messages.append(message)
    # Blocks until on_ready() has sent everything and closed the client.
    client.run(TOKEN)
|