real_ladybug-0.0.1.dev1-cp311-cp311-win_amd64.whl
This diff shows the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between package versions as they appear in their public registries.
Potentially problematic release: this version of real-ladybug might be problematic.
- real_ladybug/__init__.py +83 -0
- real_ladybug/_lbug.cp311-win_amd64.pyd +0 -0
- real_ladybug/_lbug.exp +0 -0
- real_ladybug/_lbug.lib +0 -0
- real_ladybug/async_connection.py +226 -0
- real_ladybug/connection.py +323 -0
- real_ladybug/constants.py +7 -0
- real_ladybug/database.py +307 -0
- real_ladybug/prepared_statement.py +51 -0
- real_ladybug/py.typed +0 -0
- real_ladybug/query_result.py +511 -0
- real_ladybug/torch_geometric_feature_store.py +185 -0
- real_ladybug/torch_geometric_graph_store.py +131 -0
- real_ladybug/torch_geometric_result_converter.py +282 -0
- real_ladybug/types.py +39 -0
- real_ladybug-0.0.1.dev1.dist-info/METADATA +88 -0
- real_ladybug-0.0.1.dev1.dist-info/RECORD +114 -0
- real_ladybug-0.0.1.dev1.dist-info/WHEEL +5 -0
- real_ladybug-0.0.1.dev1.dist-info/licenses/LICENSE +21 -0
- real_ladybug-0.0.1.dev1.dist-info/top_level.txt +3 -0
- real_ladybug-0.0.1.dev1.dist-info/zip-safe +1 -0
- real_ladybug-source/scripts/antlr4/hash.py +2 -0
- real_ladybug-source/scripts/antlr4/keywordhandler.py +47 -0
- real_ladybug-source/scripts/collect-extensions.py +68 -0
- real_ladybug-source/scripts/collect-single-file-header.py +126 -0
- real_ladybug-source/scripts/export-dbs.py +101 -0
- real_ladybug-source/scripts/export-import-test.py +345 -0
- real_ladybug-source/scripts/extension/purge-beta.py +34 -0
- real_ladybug-source/scripts/generate-cpp-docs/collect_files.py +122 -0
- real_ladybug-source/scripts/generate-tinysnb.py +34 -0
- real_ladybug-source/scripts/get-clangd-diagnostics.py +233 -0
- real_ladybug-source/scripts/migrate-lbug-db.py +308 -0
- real_ladybug-source/scripts/multiplatform-test-helper/collect-results.py +71 -0
- real_ladybug-source/scripts/multiplatform-test-helper/notify-discord.py +68 -0
- real_ladybug-source/scripts/pip-package/package_tar.py +90 -0
- real_ladybug-source/scripts/pip-package/setup.py +130 -0
- real_ladybug-source/scripts/run-clang-format.py +408 -0
- real_ladybug-source/scripts/setup-extension-repo.py +67 -0
- real_ladybug-source/scripts/test-simsimd-dispatch.py +45 -0
- real_ladybug-source/scripts/update-nightly-build-version.py +81 -0
- real_ladybug-source/third_party/brotli/scripts/dictionary/step-01-download-rfc.py +16 -0
- real_ladybug-source/third_party/brotli/scripts/dictionary/step-02-rfc-to-bin.py +34 -0
- real_ladybug-source/third_party/brotli/scripts/dictionary/step-03-validate-bin.py +35 -0
- real_ladybug-source/third_party/brotli/scripts/dictionary/step-04-generate-java-literals.py +85 -0
- real_ladybug-source/third_party/pybind11/tools/codespell_ignore_lines_from_errors.py +35 -0
- real_ladybug-source/third_party/pybind11/tools/libsize.py +36 -0
- real_ladybug-source/third_party/pybind11/tools/make_changelog.py +63 -0
- real_ladybug-source/tools/python_api/build/real_ladybug/__init__.py +83 -0
- real_ladybug-source/tools/python_api/build/real_ladybug/async_connection.py +226 -0
- real_ladybug-source/tools/python_api/build/real_ladybug/connection.py +323 -0
- real_ladybug-source/tools/python_api/build/real_ladybug/constants.py +7 -0
- real_ladybug-source/tools/python_api/build/real_ladybug/database.py +307 -0
- real_ladybug-source/tools/python_api/build/real_ladybug/prepared_statement.py +51 -0
- real_ladybug-source/tools/python_api/build/real_ladybug/py.typed +0 -0
- real_ladybug-source/tools/python_api/build/real_ladybug/query_result.py +511 -0
- real_ladybug-source/tools/python_api/build/real_ladybug/torch_geometric_feature_store.py +185 -0
- real_ladybug-source/tools/python_api/build/real_ladybug/torch_geometric_graph_store.py +131 -0
- real_ladybug-source/tools/python_api/build/real_ladybug/torch_geometric_result_converter.py +282 -0
- real_ladybug-source/tools/python_api/build/real_ladybug/types.py +39 -0
- real_ladybug-source/tools/python_api/src_py/__init__.py +83 -0
- real_ladybug-source/tools/python_api/src_py/async_connection.py +226 -0
- real_ladybug-source/tools/python_api/src_py/connection.py +323 -0
- real_ladybug-source/tools/python_api/src_py/constants.py +7 -0
- real_ladybug-source/tools/python_api/src_py/database.py +307 -0
- real_ladybug-source/tools/python_api/src_py/prepared_statement.py +51 -0
- real_ladybug-source/tools/python_api/src_py/py.typed +0 -0
- real_ladybug-source/tools/python_api/src_py/query_result.py +511 -0
- real_ladybug-source/tools/python_api/src_py/torch_geometric_feature_store.py +185 -0
- real_ladybug-source/tools/python_api/src_py/torch_geometric_graph_store.py +131 -0
- real_ladybug-source/tools/python_api/src_py/torch_geometric_result_converter.py +282 -0
- real_ladybug-source/tools/python_api/src_py/types.py +39 -0
- real_ladybug-source/tools/python_api/test/conftest.py +230 -0
- real_ladybug-source/tools/python_api/test/disabled_test_extension.py +73 -0
- real_ladybug-source/tools/python_api/test/ground_truth.py +430 -0
- real_ladybug-source/tools/python_api/test/test_arrow.py +694 -0
- real_ladybug-source/tools/python_api/test/test_async_connection.py +159 -0
- real_ladybug-source/tools/python_api/test/test_blob_parameter.py +145 -0
- real_ladybug-source/tools/python_api/test/test_connection.py +49 -0
- real_ladybug-source/tools/python_api/test/test_database.py +234 -0
- real_ladybug-source/tools/python_api/test/test_datatype.py +372 -0
- real_ladybug-source/tools/python_api/test/test_df.py +564 -0
- real_ladybug-source/tools/python_api/test/test_dict.py +112 -0
- real_ladybug-source/tools/python_api/test/test_exception.py +54 -0
- real_ladybug-source/tools/python_api/test/test_fsm.py +227 -0
- real_ladybug-source/tools/python_api/test/test_get_header.py +49 -0
- real_ladybug-source/tools/python_api/test/test_helper.py +8 -0
- real_ladybug-source/tools/python_api/test/test_issue.py +147 -0
- real_ladybug-source/tools/python_api/test/test_iteration.py +96 -0
- real_ladybug-source/tools/python_api/test/test_networkx.py +437 -0
- real_ladybug-source/tools/python_api/test/test_parameter.py +340 -0
- real_ladybug-source/tools/python_api/test/test_prepared_statement.py +117 -0
- real_ladybug-source/tools/python_api/test/test_query_result.py +54 -0
- real_ladybug-source/tools/python_api/test/test_query_result_close.py +44 -0
- real_ladybug-source/tools/python_api/test/test_scan_pandas.py +676 -0
- real_ladybug-source/tools/python_api/test/test_scan_pandas_pyarrow.py +714 -0
- real_ladybug-source/tools/python_api/test/test_scan_polars.py +165 -0
- real_ladybug-source/tools/python_api/test/test_scan_pyarrow.py +167 -0
- real_ladybug-source/tools/python_api/test/test_timeout.py +11 -0
- real_ladybug-source/tools/python_api/test/test_torch_geometric.py +640 -0
- real_ladybug-source/tools/python_api/test/test_torch_geometric_remote_backend.py +111 -0
- real_ladybug-source/tools/python_api/test/test_udf.py +207 -0
- real_ladybug-source/tools/python_api/test/test_version.py +6 -0
- real_ladybug-source/tools/python_api/test/test_wal.py +80 -0
- real_ladybug-source/tools/python_api/test/type_aliases.py +10 -0
- real_ladybug-source/tools/rust_api/update_version.py +47 -0
- real_ladybug-source/tools/shell/test/conftest.py +218 -0
- real_ladybug-source/tools/shell/test/test_helper.py +60 -0
- real_ladybug-source/tools/shell/test/test_shell_basics.py +325 -0
- real_ladybug-source/tools/shell/test/test_shell_commands.py +656 -0
- real_ladybug-source/tools/shell/test/test_shell_control_edit.py +438 -0
- real_ladybug-source/tools/shell/test/test_shell_control_search.py +468 -0
- real_ladybug-source/tools/shell/test/test_shell_esc_edit.py +232 -0
- real_ladybug-source/tools/shell/test/test_shell_esc_search.py +162 -0
- real_ladybug-source/tools/shell/test/test_shell_flags.py +645 -0
@@ -0,0 +1,345 @@
+import argparse
+import subprocess
+import os
+import shutil
+
+
+def create_worktree(path, commit, repo_root):
+    remove_worktree(path, repo_root)
+    run_command(f"git worktree add {path} {commit}", cwd=repo_root)
+
+
+def remove_worktree(path, repo_root):
+    if os.path.exists(path):
+        run_command(f"git worktree remove --force {path}", cwd=repo_root, check=False)
+
+
+def check_for_extension_build(makefile):
+    with open(makefile, "r") as f:
+        return any(line.strip() == "extension-build:" for line in f)
+
+
+# Duplicates code from benchmark/version.py.
+# Should be fine since the footprint is small, but any further extensions
+# to this tool should rework this if the footprint for duplicated code gets
+# bigger.
+def get_version(lbug_root):
+    cmake_file = os.path.join(lbug_root, "CMakeLists.txt")
+    with open(cmake_file) as f:
+        for line in f:
+            if line.startswith("project(Lbug VERSION"):
+                return line.split(" ")[2].strip()
+    return "0"
+
+
+def run_command(cmd, cwd=None, capture_output=False, check=True):
+    print(f"> Running: {cmd} (cwd={cwd})")
+
+    # We redirect stdin to devnull in an attempt
+    # to stop the process from interfering with the terminal's input buffer.
+    # This needs some work. After running the script I found that my buffer
+    # was filled with a sequence like 8;1R8;1R8;1R8;1R8;... This doesn't seem
+    # to affect the script but is annoying.
+    result = subprocess.run(
+        cmd,
+        cwd=cwd,
+        text=True,
+        capture_output=capture_output,
+        check=check,
+        shell=True,
+        stdin=subprocess.DEVNULL,
+    )
+    if capture_output:
+        return result.stdout.strip()
+    else:
+        print(result.stdout or "")
+        return None
+
+
+def export_datasets_and_test(
+    lbug_root,
+    base_worktree,
+    test_worktree,
+    dataset_dir,
+    output_dir,
+    cleanup,
+    export_path,
+):
+    version = get_version(base_worktree)
+    if version == "0":
+        raise Exception("Failed to determine version. Aborting.")
+    export_path = os.path.abspath(os.path.join(output_dir, version))
+
+    if not os.path.exists(export_path + os.sep):
+        # Also build the `json` extension, which is needed for some datasets, like tinysnb_json.
+        if check_for_extension_build(
+            os.path.abspath(os.path.join(base_worktree, "Makefile"))
+        ):
+            run_command("make extension-build EXTENSION_LIST=json", cwd=base_worktree)
+        # Older Makefiles do not have the `extension-build` rule
+        else:
+            run_command(
+                "make extension-test-build EXTENSION_LIST=json", cwd=base_worktree
+            )
+
+        # Use '_inprogress' as a temporary suffix to store the exports
+        # and atomically rename to `export_path` after all exports are successful.
+        # Avoids partial exports.
+        inprogress_path = f"{export_path}_inprogress" + os.sep
+        export_script_path = os.path.join(lbug_root, "scripts", "export-dbs.py")
+        exec_path = os.path.join(
+            base_worktree, "build", "relwithdebinfo", "tools", "shell", "lbug"
+        )
+        run_command(
+            f"""python3 {export_script_path} \
+                --executable {exec_path} \
+                --dataset-dir {dataset_dir} \
+                --output-dir {inprogress_path}""",
+            cwd=lbug_root,
+        )
+        os.rename(inprogress_path, export_path + os.sep)
+
+    # Append `/` so that datasets can be found correctly
+    os.environ["E2E_IMPORT_DB_DIR"] = export_path + os.sep
+    run_command("make test", cwd=test_worktree)
+    return 0
+
+
+def write_split_testfile(
+    export_dir, import_dir, case_name, header, export_lines, import_lines, db_dir
+):
+    export_path = os.path.join(export_dir, f"{case_name}.test")
+    import_path = os.path.join(import_dir, f"{case_name}.test")
+    os.makedirs(export_dir, exist_ok=True)
+    os.makedirs(import_dir, exist_ok=True)
+
+    def replace_placeholders(lines):
+        return [
+            line.replace("${LBUG_EXPORT_DB_DIRECTORY}", os.path.join(db_dir, ""))
+            for line in lines
+        ]

+    # Copying the dataset here is unnecessary as the tests use exported dbs.
+    def transform_import_header(header):
+        new_lines = []
+        for line in header.splitlines(keepends=True):
+            if line.startswith("-DATASET"):
+                line = "-DATASET CSV empty\n"
+            new_lines.append(line)
+        return "".join(new_lines)
+
+    # This is to handle an issue where the exported db seems to be
+    # deleted making the import fail. DBs are still imported with the line
+    # -STATEMENT IMPORT DATABASE ...
+    def transform_import_lines(lines):
+        result = []
+        for line in lines:
+            if line.startswith("-IMPORT_DATABASE"):
+                continue
+            result.append(line)
+        return result
+
+    with open(export_path, "w") as f:
+        f.write(header.replace("${LBUG_EXPORT_DB_DIRECTORY}", os.path.join(db_dir, "")))
+        f.writelines(replace_placeholders(export_lines))
+
+    with open(import_path, "w") as f:
+        f.write(
+            transform_import_header(
+                header.replace("${LBUG_EXPORT_DB_DIRECTORY}", os.path.join(db_dir, ""))
+            )
+        )
+        f.writelines(transform_import_lines(replace_placeholders(import_lines)))
+
+
+def split_tests(root, output_dir, file, db_dir):
+    relative_path = os.path.relpath(file.name, root)
+    base_path = os.path.splitext(relative_path)[0]
+    export_dir = os.path.abspath(os.path.join(output_dir, "export", base_path))
+    import_dir = os.path.abspath(os.path.join(output_dir, "import", base_path))
+
+    header = ""
+    header_parsed = False
+    current_case = None
+    export_lines = []
+    import_lines = []
+    in_case = False
+    in_import = False
+    for line in file:
+        line = line.rstrip("\n")
+        if not header_parsed:
+            header += line + "\n"
+            if line.strip() == "--":
+                header_parsed = True
+            continue
+        if line.startswith("-CASE"):
+            # Write the previous case, if it had a split.
+            if in_case and in_import:
+                write_split_testfile(
+                    export_dir,
+                    import_dir,
+                    current_case,
+                    header,
+                    export_lines,
+                    import_lines,
+                    db_dir,
+                )
+            export_lines = []
+            import_lines = []
+            in_import = False
+            current_case = line[len("-CASE") :].strip()
+            in_case = True
+            export_lines.append(line + "\n")
+            import_lines.append(line + "\n")
+            continue
+        if line.startswith("#EXPORT_IMPORT_TEST_SPLIT"):
+            in_import = True
+            continue
+        if in_case:
+            if in_import:
+                import_lines.append(line + "\n")
+            else:
+                export_lines.append(line + "\n")
+    # Handle the last case of a file.
+    if current_case and in_import:
+        write_split_testfile(
+            export_dir,
+            import_dir,
+            current_case,
+            header,
+            export_lines,
+            import_lines,
+            db_dir,
+        )
+
+
+def split_files(test_dir, output_dir):
+    db_dir = os.path.abspath(os.path.join(output_dir, "db"))
+    os.makedirs(db_dir, exist_ok=True)
+    for root, dirs, files in os.walk(test_dir):
+        for file in files:
+            full_path = os.path.join(root, file)
+            with open(full_path, "r") as f:
+                split_tests(test_dir, output_dir, f, db_dir)
+
+
+def run_export_specific_tests(
+    lbug_root, base_worktree, test_worktree, test_dir, output_dir, cleanup
+):
+    # Split tests in test_dir
+    split_files(test_dir, output_dir)
+    # Build base_worktree lbug
+    run_command("make test-build", cwd=base_worktree)
+    # Run the export tests.
+    run_command(
+        f"E2E_TEST_FILES_DIRECTORY='.' ./.worktree-base/build/relwithdebinfo/test/runner/e2e_test {os.path.abspath(os.path.join(output_dir, 'export'))}",
+        cwd=lbug_root,
+        check=False,
+    )
+    # Build test_worktree lbug
+    run_command("make test-build", cwd=test_worktree)
+    # Run the import tests.
+    run_command(
+        f"E2E_TEST_FILES_DIRECTORY='.' ./.worktree-test/build/relwithdebinfo/test/runner/e2e_test {os.path.abspath(os.path.join(output_dir, 'import'))}",
+        cwd=lbug_root,
+        check=False,
+    )
+
+
+def main():
+    base_worktree = None
+    test_worktree = None
+    export_path = None
+    cleanup = None
+
+    parser = argparse.ArgumentParser(
+        description="Export DBs from dataset-dir to output-dir using base-commit and test in test-commit"
+    )
+    parser.add_argument(
+        "--base-commit", required=True, help="Git commit to export databases from"
+    )
+    parser.add_argument(
+        "--test-commit", required=True, help="Git commit to test against"
+    )
+
+    parser.add_argument("--dataset-dir", help="Path to the dataset directory")
+    parser.add_argument("--test-dir", help="Path to the test directory")
+
+    parser.add_argument(
+        "--output-dir", required=True, help="Path to output the exported databases"
+    )
+
+    mutually_exclusive_args = parser.add_mutually_exclusive_group()
+    mutually_exclusive_args.add_argument(
+        "--cleanup",
+        dest="cleanup",
+        action="store_true",
+        help="Delete exported DBs after test",
+    )
+    mutually_exclusive_args.add_argument(
+        "--no-cleanup",
+        dest="cleanup",
+        action="store_false",
+        help="Do not delete exported DBs after test",
+    )
+    parser.set_defaults(cleanup=True)
+
+    try:
+        args = parser.parse_args()
+
+        if bool(args.dataset_dir) == bool(args.test_dir):
+            raise Exception(
+                "You must provide exactly one of --dataset-dir or --test-dir."
+            )
+
+        base_commit = args.base_commit
+        test_commit = args.test_commit
+        output_dir = args.output_dir
+        cleanup = args.cleanup
+
+        script_dir = os.path.dirname(os.path.realpath(__file__))
+        lbug_root = os.path.abspath(os.path.join(script_dir, ".."))
+        base_worktree = os.path.join(lbug_root, ".worktree-base")
+        test_worktree = os.path.join(lbug_root, ".worktree-test")
+        create_worktree(base_worktree, base_commit, lbug_root)
+        create_worktree(test_worktree, test_commit, lbug_root)
+
+        if bool(args.dataset_dir):
+            export_datasets_and_test(
+                lbug_root,
+                base_worktree,
+                test_worktree,
+                os.path.abspath(args.dataset_dir),
+                output_dir,
+                cleanup,
+                export_path,
+            )
+        else:
+            assert bool(args.test_dir)
+            export_path = output_dir
+            run_export_specific_tests(
+                lbug_root,
+                base_worktree,
+                test_worktree,
+                os.path.abspath(args.test_dir),
+                output_dir,
+                cleanup,
+            )
+
+    finally:
+        if cleanup and export_path and os.path.exists(export_path):
+            print(f"Cleaning up export directory: {export_path}")
+            shutil.rmtree(export_path)
+        else:
+            print(f"Skipping cleaning up export directory: {export_path}")
+
+        if base_worktree or test_worktree:
+            print("Removing worktrees")
+            if base_worktree:
+                remove_worktree(base_worktree, lbug_root)
+            if test_worktree:
+                remove_worktree(test_worktree, lbug_root)
+
+
+if __name__ == "__main__":
+    main()
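By its line count (+345) this hunk corresponds to real_ladybug-source/scripts/export-import-test.py in the file list above. Going by the argparse definitions in main(), a typical invocation would look roughly like the following; the commit ids and directory values are placeholders, not values taken from this release:

    python3 scripts/export-import-test.py --base-commit <released-commit> --test-commit <commit-under-test> --dataset-dir dataset --output-dir ./exported-dbs --no-cleanup

Exactly one of --dataset-dir or --test-dir must be supplied, and cleanup of the exported databases is the default behaviour.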
@@ -0,0 +1,34 @@
+import pathlib
+import os
+from packaging.version import Version
+
+CURRENT_DIR = pathlib.Path(__file__).parent.resolve()
+
+RELEASES_PATH = CURRENT_DIR.joinpath('releases').resolve()
+
+production_releases = open(CURRENT_DIR.joinpath('PRODUCTION_RELEASES')).read().splitlines()
+
+releases_to_purge = [r for r in os.listdir(RELEASES_PATH) if r.startswith('v')]
+
+releases_to_purge = [r for r in releases_to_purge if r not in production_releases]
+releases_to_purge = [r[1:] for r in releases_to_purge]
+releases_to_purge.sort(key=Version)
+
+releases_to_purge.pop()
+
+releases_to_purge = ['v' + r for r in releases_to_purge]
+
+if len(releases_to_purge) == 0:
+    print('No releases to purge.')
+    exit(0)
+
+print('Releases to purge:')
+for r in releases_to_purge:
+    print(' ' + r)
+
+for r in releases_to_purge:
+    path_to_purge = RELEASES_PATH.joinpath(r)
+    print('Deleting ' + str(path_to_purge))
+    os.system('rm -rf ' + str(path_to_purge))
+
+print('Done.')
@@ -0,0 +1,122 @@
+import os
+import sys
+import shutil
+import logging
+from hashlib import md5
+from pathlib import Path
+
+logging.basicConfig(level=logging.DEBUG)
+
+HEADER_BASE_PATH = os.path.realpath(
+    os.path.join(os.path.dirname(__file__), '../../src/include'))
+MAIN_HEADER_PATH = os.path.realpath(
+    os.path.join(HEADER_BASE_PATH, 'main'))
+START_POINT = os.path.realpath(
+    os.path.join(MAIN_HEADER_PATH, 'lbug.h')
+)
+JSON_HEADER_PATH = os.path.realpath(
+    os.path.join(os.path.dirname(__file__), '../../third_party/nlohmann_json/json_fwd.hpp'))
+HEADER_TARGET_PATH = os.path.realpath(
+    os.path.join(os.path.dirname(__file__), 'headers')
+)
+
+logging.debug('HEADER_BASE_PATH: %s', HEADER_BASE_PATH)
+logging.debug('MAIN_HEADER_PATH: %s', MAIN_HEADER_PATH)
+logging.debug('START_POINT: %s', START_POINT)
+logging.debug('JSON_HEADER_PATH: %s', JSON_HEADER_PATH)
+
+
+def collect_header_file_path_recurse(start_point):
+    global processed_header_files
+    global header_map
+    if start_point in processed_header_files:
+        return []
+    curr_header_files = []
+    with open(start_point, 'r') as f:
+        for line in f.readlines():
+            if not line.startswith('#include "'):
+                continue
+            header_path = os.path.normpath(line.split('"')[1])
+            header_real_path = None
+            # Special case for json_fwd.hpp
+            if header_path == 'json_fwd.hpp':
+                header_real_path = JSON_HEADER_PATH
+                logging.debug('Found header: %s at %s',
+                              header_path, header_real_path)
+            else:
+                # Check if the header is in the current directory
+                start_point_dir = os.path.dirname(start_point)
+                header_real_path = os.path.join(
+                    start_point_dir, header_path)
+                if os.path.exists(header_real_path):
+                    logging.debug('Found header: %s at %s',
+                                  header_path, header_real_path)
+                else:
+                    # Check if the header is in the include directory
+                    header_real_path = os.path.join(
+                        HEADER_BASE_PATH, header_path)
+                    if os.path.exists(header_real_path):
+                        logging.debug('Found header: %s at %s',
+                                      header_path, header_real_path)
+            if header_real_path is None:
+                logging.error('Could not find header: %s', header_path)
+                sys.exit(1)
+            curr_header_files.append(header_real_path)
+            if start_point not in header_map:
+                header_map[start_point] = {}
+            header_map[start_point][header_path] = header_real_path
+    for header_file in curr_header_files:
+        curr_header_files += collect_header_file_path_recurse(header_file)
+
+    processed_header_files.add(start_point)
+    return curr_header_files
+
+
+def collect_header_file_paths():
+    global processed_header_files
+    global header_map
+    processed_header_files = set()
+    header_map = {}
+    collect_header_file_path_recurse(START_POINT)
+
+
+def copy_header(header_real_path):
+    global copied_headers
+    if header_real_path in copied_headers:
+        return copied_headers[header_real_path]
+    header_name = os.path.basename(header_real_path)
+    # Rename the header if it is already copied
+    if os.path.exists(os.path.join(HEADER_TARGET_PATH, header_name)):
+        header_name = md5(header_real_path.encode()).hexdigest() + '.h'
+    target_path = os.path.join(HEADER_TARGET_PATH, header_name)
+    shutil.copyfile(header_real_path, target_path)
+    copied_headers[header_real_path] = header_name
+    logging.debug('Copied header: %s to %s', header_real_path, target_path)
+    return header_name
+
+
+def copy_headers():
+    global header_map
+    global copied_headers
+    copied_headers = {}
+    if os.path.exists(HEADER_TARGET_PATH):
+        shutil.rmtree(HEADER_TARGET_PATH, ignore_errors=True)
+    os.makedirs(HEADER_TARGET_PATH)
+    for src_header in header_map:
+        src_header_copied_name = copy_header(src_header)
+        src_header_copied_path = os.path.join(
+            HEADER_TARGET_PATH, src_header_copied_name)
+        file = Path(src_header_copied_path)
+        for original_header_path in header_map[src_header]:
+            header_real_path = header_map[src_header][original_header_path]
+            header_name = copy_header(header_real_path)
+            file.write_text(file.read_text().replace(
+                original_header_path, header_name))
+
+
+if __name__ == '__main__':
+    logging.info('Collecting header files...')
+    collect_header_file_paths()
+    logging.info('Copying header files...')
+    copy_headers()
+    logging.info('Done!')
@@ -0,0 +1,34 @@
+import os
+import shutil
+import subprocess
+import sys
+
+LBUG_ROOT = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
+# Datasets can only be copied from the root since copy.schema contains relative paths
+os.chdir(LBUG_ROOT)
+
+# Define the build type from input
+if len(sys.argv) > 1 and sys.argv[1].lower() == "release":
+    build_type = "release"
+else:
+    build_type = "relwithdebinfo"
+
+# Change the current working directory
+if os.path.exists(f"{LBUG_ROOT}/dataset/databases/tinysnb"):
+    shutil.rmtree(f"{LBUG_ROOT}/dataset/databases/tinysnb")
+if sys.platform == "win32":
+    lbug_shell_path = f"{LBUG_ROOT}/build/{build_type}/src/lbug_shell"
+else:
+    lbug_shell_path = f"{LBUG_ROOT}/build/{build_type}/tools/shell/lbug"
+subprocess.check_call(
+    [
+        "python3",
+        f"{LBUG_ROOT}/benchmark/serializer.py",
+        "TinySNB",
+        f"{LBUG_ROOT}/dataset/tinysnb",
+        f"{LBUG_ROOT}/dataset/databases/tinysnb",
+        "--single-thread",
+        "--lbug-shell",
+        lbug_shell_path,
+    ]
+)
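This final +34 hunk matches real_ladybug-source/scripts/generate-tinysnb.py from the file list above. Based on the sys.argv check, it takes a single optional build-type argument: anything other than "release" (or no argument at all) falls back to relwithdebinfo. A hypothetical invocation:

    python3 scripts/generate-tinysnb.py release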